From 3762ff40e51466bc516939a31732300c8e20211a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 4 Sep 2024 10:17:09 -0400 Subject: [PATCH 01/59] fix!: [google-cloud-apihub] remove gRPC support for client libraries (#13055) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 670927686 Source-Link: https://github.com/googleapis/googleapis/commit/8c6de209d316e7a33fcd28e743ae893c83b17eed Source-Link: https://github.com/googleapis/googleapis-gen/commit/c3840f8c7d753503d90394b7b1d12f944ebdd501 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFwaWh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYzM4NDBmOGM3ZDc1MzUwM2Q5MDM5NGI3YjFkMTJmOTQ0ZWJkZDUwMSJ9 --------- Co-authored-by: Owl Bot --- .../google/cloud/apihub/__init__.py | 26 - .../google/cloud/apihub_v1/__init__.py | 22 +- .../cloud/apihub_v1/gapic_metadata.json | 810 +- .../apihub_v1/services/api_hub/__init__.py | 6 +- .../apihub_v1/services/api_hub/client.py | 4 - .../apihub_v1/services/api_hub/pagers.py | 624 - .../services/api_hub/transports/__init__.py | 6 - .../services/api_hub_dependencies/__init__.py | 6 +- .../services/api_hub_dependencies/client.py | 4 - .../services/api_hub_dependencies/pagers.py | 78 - .../transports/__init__.py | 6 - .../services/api_hub_plugin/__init__.py | 6 +- .../services/api_hub_plugin/client.py | 4 - .../api_hub_plugin/transports/__init__.py | 6 - .../__init__.py | 6 +- .../client.py | 6 - .../pagers.py | 93 - .../transports/__init__.py | 6 - .../services/linting_service/__init__.py | 6 +- .../services/linting_service/client.py | 4 - .../linting_service/transports/__init__.py | 6 - .../services/provisioning/__init__.py | 6 +- .../apihub_v1/services/provisioning/client.py | 4 - .../provisioning/transports/__init__.py | 6 - .../__init__.py | 6 +- .../client.py | 6 - .../pagers.py | 93 - .../transports/__init__.py | 8 - ...b_v1_generated_api_hub_create_api_async.py | 56 - ...enerated_api_hub_create_attribute_async.py | 58 - 
...nerated_api_hub_create_deployment_async.py | 60 - ...rated_api_hub_create_external_api_async.py | 56 - ..._v1_generated_api_hub_create_spec_async.py | 58 - ..._generated_api_hub_create_version_async.py | 56 - ...b_v1_generated_api_hub_delete_api_async.py | 50 - ...enerated_api_hub_delete_attribute_async.py | 50 - ...nerated_api_hub_delete_deployment_async.py | 50 - ...rated_api_hub_delete_external_api_async.py | 50 - ..._v1_generated_api_hub_delete_spec_async.py | 50 - ..._generated_api_hub_delete_version_async.py | 50 - ...ub_dependencies_create_dependency_async.py | 57 - ...ub_dependencies_delete_dependency_async.py | 50 - ...i_hub_dependencies_get_dependency_async.py | 52 - ...ub_dependencies_list_dependencies_async.py | 53 - ...ub_dependencies_update_dependency_async.py | 56 - ...ihub_v1_generated_api_hub_get_api_async.py | 52 - ...nerated_api_hub_get_api_operation_async.py | 52 - ...1_generated_api_hub_get_attribute_async.py | 52 - ..._generated_api_hub_get_definition_async.py | 52 - ..._generated_api_hub_get_deployment_async.py | 52 - ...enerated_api_hub_get_external_api_async.py | 52 - ...hub_v1_generated_api_hub_get_spec_async.py | 52 - ...nerated_api_hub_get_spec_contents_async.py | 52 - ..._v1_generated_api_hub_get_version_async.py | 52 - ...rated_api_hub_list_api_operations_async.py | 53 - ...ub_v1_generated_api_hub_list_apis_async.py | 53 - ...generated_api_hub_list_attributes_async.py | 53 - ...enerated_api_hub_list_deployments_async.py | 53 - ...erated_api_hub_list_external_apis_async.py | 53 - ...b_v1_generated_api_hub_list_specs_async.py | 53 - ...1_generated_api_hub_list_versions_async.py | 53 - ...ted_api_hub_plugin_disable_plugin_async.py | 52 - ...ated_api_hub_plugin_enable_plugin_async.py | 52 - ...nerated_api_hub_plugin_get_plugin_async.py | 52 - ...enerated_api_hub_search_resources_async.py | 54 - ...b_v1_generated_api_hub_update_api_async.py | 55 - ...enerated_api_hub_update_attribute_async.py | 57 - 
...nerated_api_hub_update_deployment_async.py | 59 - ...rated_api_hub_update_external_api_async.py | 55 - ..._v1_generated_api_hub_update_spec_async.py | 57 - ..._generated_api_hub_update_version_async.py | 55 - ..._create_host_project_registration_async.py | 57 - ...ice_get_host_project_registration_async.py | 52 - ...e_list_host_project_registrations_async.py | 53 - ...d_linting_service_get_style_guide_async.py | 52 - ..._service_get_style_guide_contents_async.py | 52 - ...nerated_linting_service_lint_spec_async.py | 50 - ...inting_service_update_style_guide_async.py | 57 - ...visioning_create_api_hub_instance_async.py | 60 - ...provisioning_get_api_hub_instance_async.py | 52 - ...visioning_lookup_api_hub_instance_async.py | 52 - ...create_runtime_project_attachment_async.py | 57 - ...delete_runtime_project_attachment_async.py | 50 - ...ce_get_runtime_project_attachment_async.py | 52 - ..._list_runtime_project_attachments_async.py | 53 - ...lookup_runtime_project_attachment_async.py | 52 - ...ippet_metadata_google.cloud.apihub.v1.json | 7007 +--- .../unit/gapic/apihub_v1/test_api_hub.py | 34405 ++++------------ .../apihub_v1/test_api_hub_dependencies.py | 7052 +--- .../gapic/apihub_v1/test_api_hub_plugin.py | 4550 +- .../test_host_project_registration_service.py | 5560 +-- .../gapic/apihub_v1/test_linting_service.py | 5347 +-- .../unit/gapic/apihub_v1/test_provisioning.py | 5044 +-- ...test_runtime_project_attachment_service.py | 7199 +--- 94 files changed, 18716 insertions(+), 62412 deletions(-) delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_api_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_attribute_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_deployment_async.py delete mode 100644 
packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_external_api_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_spec_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_version_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_api_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_attribute_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_deployment_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_external_api_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_spec_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_version_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_create_dependency_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_delete_dependency_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_get_dependency_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_list_dependencies_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_update_dependency_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_async.py delete mode 100644 
packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_operation_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_attribute_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_definition_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_deployment_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_external_api_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_contents_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_version_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_api_operations_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_apis_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_attributes_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_deployments_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_external_apis_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_specs_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_versions_async.py delete mode 100644 
packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_disable_plugin_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_enable_plugin_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_get_plugin_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_search_resources_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_api_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_attribute_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_deployment_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_external_api_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_spec_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_version_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_create_host_project_registration_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_get_host_project_registration_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_list_host_project_registrations_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_async.py delete mode 100644 
packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_contents_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_lint_spec_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_update_style_guide_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_create_api_hub_instance_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_get_api_hub_instance_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_lookup_api_hub_instance_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_async.py diff --git a/packages/google-cloud-apihub/google/cloud/apihub/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub/__init__.py index 62b04504759e..72b5c1f8fbe5 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub/__init__.py @@ -18,35 +18,16 @@ 
__version__ = package_version.__version__ -from google.cloud.apihub_v1.services.api_hub.async_client import ApiHubAsyncClient from google.cloud.apihub_v1.services.api_hub.client import ApiHubClient -from google.cloud.apihub_v1.services.api_hub_dependencies.async_client import ( - ApiHubDependenciesAsyncClient, -) from google.cloud.apihub_v1.services.api_hub_dependencies.client import ( ApiHubDependenciesClient, ) -from google.cloud.apihub_v1.services.api_hub_plugin.async_client import ( - ApiHubPluginAsyncClient, -) from google.cloud.apihub_v1.services.api_hub_plugin.client import ApiHubPluginClient -from google.cloud.apihub_v1.services.host_project_registration_service.async_client import ( - HostProjectRegistrationServiceAsyncClient, -) from google.cloud.apihub_v1.services.host_project_registration_service.client import ( HostProjectRegistrationServiceClient, ) -from google.cloud.apihub_v1.services.linting_service.async_client import ( - LintingServiceAsyncClient, -) from google.cloud.apihub_v1.services.linting_service.client import LintingServiceClient -from google.cloud.apihub_v1.services.provisioning.async_client import ( - ProvisioningAsyncClient, -) from google.cloud.apihub_v1.services.provisioning.client import ProvisioningClient -from google.cloud.apihub_v1.services.runtime_project_attachment_service.async_client import ( - RuntimeProjectAttachmentServiceAsyncClient, -) from google.cloud.apihub_v1.services.runtime_project_attachment_service.client import ( RuntimeProjectAttachmentServiceClient, ) @@ -175,19 +156,12 @@ __all__ = ( "ApiHubClient", - "ApiHubAsyncClient", "ApiHubDependenciesClient", - "ApiHubDependenciesAsyncClient", "ApiHubPluginClient", - "ApiHubPluginAsyncClient", "HostProjectRegistrationServiceClient", - "HostProjectRegistrationServiceAsyncClient", "LintingServiceClient", - "LintingServiceAsyncClient", "ProvisioningClient", - "ProvisioningAsyncClient", "RuntimeProjectAttachmentServiceClient", - "RuntimeProjectAttachmentServiceAsyncClient", 
"ApiHubResource", "CreateApiRequest", "CreateAttributeRequest", diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/__init__.py index 27a89617ee68..ddde89662be7 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/__init__.py @@ -18,20 +18,15 @@ __version__ = package_version.__version__ -from .services.api_hub import ApiHubAsyncClient, ApiHubClient -from .services.api_hub_dependencies import ( - ApiHubDependenciesAsyncClient, - ApiHubDependenciesClient, -) -from .services.api_hub_plugin import ApiHubPluginAsyncClient, ApiHubPluginClient +from .services.api_hub import ApiHubClient +from .services.api_hub_dependencies import ApiHubDependenciesClient +from .services.api_hub_plugin import ApiHubPluginClient from .services.host_project_registration_service import ( - HostProjectRegistrationServiceAsyncClient, HostProjectRegistrationServiceClient, ) -from .services.linting_service import LintingServiceAsyncClient, LintingServiceClient -from .services.provisioning import ProvisioningAsyncClient, ProvisioningClient +from .services.linting_service import LintingServiceClient +from .services.provisioning import ProvisioningClient from .services.runtime_project_attachment_service import ( - RuntimeProjectAttachmentServiceAsyncClient, RuntimeProjectAttachmentServiceClient, ) from .types.apihub_service import ( @@ -158,13 +153,6 @@ ) __all__ = ( - "ApiHubAsyncClient", - "ApiHubDependenciesAsyncClient", - "ApiHubPluginAsyncClient", - "HostProjectRegistrationServiceAsyncClient", - "LintingServiceAsyncClient", - "ProvisioningAsyncClient", - "RuntimeProjectAttachmentServiceAsyncClient", "Api", "ApiHubClient", "ApiHubDependenciesClient", diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_metadata.json b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_metadata.json index 
079807dd2715..1585fa5ee448 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_metadata.json +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_metadata.json @@ -7,7 +7,7 @@ "services": { "ApiHub": { "clients": { - "grpc": { + "rest": { "libraryClient": "ApiHubClient", "rpcs": { "CreateApi": { @@ -186,728 +186,138 @@ ] } } - }, - "grpc-async": { - "libraryClient": "ApiHubAsyncClient", + } + } + }, + "ApiHubDependencies": { + "clients": { + "rest": { + "libraryClient": "ApiHubDependenciesClient", "rpcs": { - "CreateApi": { - "methods": [ - "create_api" - ] - }, - "CreateAttribute": { - "methods": [ - "create_attribute" - ] - }, - "CreateDeployment": { - "methods": [ - "create_deployment" - ] - }, - "CreateExternalApi": { - "methods": [ - "create_external_api" - ] - }, - "CreateSpec": { - "methods": [ - "create_spec" - ] - }, - "CreateVersion": { - "methods": [ - "create_version" - ] - }, - "DeleteApi": { - "methods": [ - "delete_api" - ] - }, - "DeleteAttribute": { - "methods": [ - "delete_attribute" - ] - }, - "DeleteDeployment": { - "methods": [ - "delete_deployment" - ] - }, - "DeleteExternalApi": { - "methods": [ - "delete_external_api" - ] - }, - "DeleteSpec": { - "methods": [ - "delete_spec" - ] - }, - "DeleteVersion": { - "methods": [ - "delete_version" - ] - }, - "GetApi": { - "methods": [ - "get_api" - ] - }, - "GetApiOperation": { - "methods": [ - "get_api_operation" - ] - }, - "GetAttribute": { - "methods": [ - "get_attribute" - ] - }, - "GetDefinition": { - "methods": [ - "get_definition" - ] - }, - "GetDeployment": { - "methods": [ - "get_deployment" - ] - }, - "GetExternalApi": { - "methods": [ - "get_external_api" - ] - }, - "GetSpec": { - "methods": [ - "get_spec" - ] - }, - "GetSpecContents": { - "methods": [ - "get_spec_contents" - ] - }, - "GetVersion": { - "methods": [ - "get_version" - ] - }, - "ListApiOperations": { - "methods": [ - "list_api_operations" - ] - }, - "ListApis": { - "methods": [ - "list_apis" - ] - 
}, - "ListAttributes": { - "methods": [ - "list_attributes" - ] - }, - "ListDeployments": { - "methods": [ - "list_deployments" - ] - }, - "ListExternalApis": { - "methods": [ - "list_external_apis" - ] - }, - "ListSpecs": { - "methods": [ - "list_specs" - ] - }, - "ListVersions": { + "CreateDependency": { "methods": [ - "list_versions" + "create_dependency" ] }, - "SearchResources": { + "DeleteDependency": { "methods": [ - "search_resources" + "delete_dependency" ] }, - "UpdateApi": { + "GetDependency": { "methods": [ - "update_api" + "get_dependency" ] }, - "UpdateAttribute": { + "ListDependencies": { "methods": [ - "update_attribute" + "list_dependencies" ] }, - "UpdateDeployment": { + "UpdateDependency": { "methods": [ - "update_deployment" + "update_dependency" ] - }, - "UpdateExternalApi": { + } + } + } + } + }, + "ApiHubPlugin": { + "clients": { + "rest": { + "libraryClient": "ApiHubPluginClient", + "rpcs": { + "DisablePlugin": { "methods": [ - "update_external_api" + "disable_plugin" ] }, - "UpdateSpec": { + "EnablePlugin": { "methods": [ - "update_spec" + "enable_plugin" ] }, - "UpdateVersion": { + "GetPlugin": { "methods": [ - "update_version" + "get_plugin" ] } } - }, + } + } + }, + "HostProjectRegistrationService": { + "clients": { "rest": { - "libraryClient": "ApiHubClient", + "libraryClient": "HostProjectRegistrationServiceClient", "rpcs": { - "CreateApi": { + "CreateHostProjectRegistration": { "methods": [ - "create_api" + "create_host_project_registration" ] }, - "CreateAttribute": { + "GetHostProjectRegistration": { "methods": [ - "create_attribute" + "get_host_project_registration" ] }, - "CreateDeployment": { + "ListHostProjectRegistrations": { "methods": [ - "create_deployment" + "list_host_project_registrations" ] - }, - "CreateExternalApi": { + } + } + } + } + }, + "LintingService": { + "clients": { + "rest": { + "libraryClient": "LintingServiceClient", + "rpcs": { + "GetStyleGuide": { "methods": [ - "create_external_api" + "get_style_guide" ] 
}, - "CreateSpec": { + "GetStyleGuideContents": { "methods": [ - "create_spec" + "get_style_guide_contents" ] }, - "CreateVersion": { + "LintSpec": { "methods": [ - "create_version" + "lint_spec" ] }, - "DeleteApi": { + "UpdateStyleGuide": { "methods": [ - "delete_api" + "update_style_guide" ] - }, - "DeleteAttribute": { + } + } + } + } + }, + "Provisioning": { + "clients": { + "rest": { + "libraryClient": "ProvisioningClient", + "rpcs": { + "CreateApiHubInstance": { "methods": [ - "delete_attribute" + "create_api_hub_instance" ] }, - "DeleteDeployment": { + "GetApiHubInstance": { "methods": [ - "delete_deployment" + "get_api_hub_instance" ] }, - "DeleteExternalApi": { + "LookupApiHubInstance": { "methods": [ - "delete_external_api" - ] - }, - "DeleteSpec": { - "methods": [ - "delete_spec" - ] - }, - "DeleteVersion": { - "methods": [ - "delete_version" - ] - }, - "GetApi": { - "methods": [ - "get_api" - ] - }, - "GetApiOperation": { - "methods": [ - "get_api_operation" - ] - }, - "GetAttribute": { - "methods": [ - "get_attribute" - ] - }, - "GetDefinition": { - "methods": [ - "get_definition" - ] - }, - "GetDeployment": { - "methods": [ - "get_deployment" - ] - }, - "GetExternalApi": { - "methods": [ - "get_external_api" - ] - }, - "GetSpec": { - "methods": [ - "get_spec" - ] - }, - "GetSpecContents": { - "methods": [ - "get_spec_contents" - ] - }, - "GetVersion": { - "methods": [ - "get_version" - ] - }, - "ListApiOperations": { - "methods": [ - "list_api_operations" - ] - }, - "ListApis": { - "methods": [ - "list_apis" - ] - }, - "ListAttributes": { - "methods": [ - "list_attributes" - ] - }, - "ListDeployments": { - "methods": [ - "list_deployments" - ] - }, - "ListExternalApis": { - "methods": [ - "list_external_apis" - ] - }, - "ListSpecs": { - "methods": [ - "list_specs" - ] - }, - "ListVersions": { - "methods": [ - "list_versions" - ] - }, - "SearchResources": { - "methods": [ - "search_resources" - ] - }, - "UpdateApi": { - "methods": [ - "update_api" - ] - 
}, - "UpdateAttribute": { - "methods": [ - "update_attribute" - ] - }, - "UpdateDeployment": { - "methods": [ - "update_deployment" - ] - }, - "UpdateExternalApi": { - "methods": [ - "update_external_api" - ] - }, - "UpdateSpec": { - "methods": [ - "update_spec" - ] - }, - "UpdateVersion": { - "methods": [ - "update_version" - ] - } - } - } - } - }, - "ApiHubDependencies": { - "clients": { - "grpc": { - "libraryClient": "ApiHubDependenciesClient", - "rpcs": { - "CreateDependency": { - "methods": [ - "create_dependency" - ] - }, - "DeleteDependency": { - "methods": [ - "delete_dependency" - ] - }, - "GetDependency": { - "methods": [ - "get_dependency" - ] - }, - "ListDependencies": { - "methods": [ - "list_dependencies" - ] - }, - "UpdateDependency": { - "methods": [ - "update_dependency" - ] - } - } - }, - "grpc-async": { - "libraryClient": "ApiHubDependenciesAsyncClient", - "rpcs": { - "CreateDependency": { - "methods": [ - "create_dependency" - ] - }, - "DeleteDependency": { - "methods": [ - "delete_dependency" - ] - }, - "GetDependency": { - "methods": [ - "get_dependency" - ] - }, - "ListDependencies": { - "methods": [ - "list_dependencies" - ] - }, - "UpdateDependency": { - "methods": [ - "update_dependency" - ] - } - } - }, - "rest": { - "libraryClient": "ApiHubDependenciesClient", - "rpcs": { - "CreateDependency": { - "methods": [ - "create_dependency" - ] - }, - "DeleteDependency": { - "methods": [ - "delete_dependency" - ] - }, - "GetDependency": { - "methods": [ - "get_dependency" - ] - }, - "ListDependencies": { - "methods": [ - "list_dependencies" - ] - }, - "UpdateDependency": { - "methods": [ - "update_dependency" - ] - } - } - } - } - }, - "ApiHubPlugin": { - "clients": { - "grpc": { - "libraryClient": "ApiHubPluginClient", - "rpcs": { - "DisablePlugin": { - "methods": [ - "disable_plugin" - ] - }, - "EnablePlugin": { - "methods": [ - "enable_plugin" - ] - }, - "GetPlugin": { - "methods": [ - "get_plugin" - ] - } - } - }, - "grpc-async": { - 
"libraryClient": "ApiHubPluginAsyncClient", - "rpcs": { - "DisablePlugin": { - "methods": [ - "disable_plugin" - ] - }, - "EnablePlugin": { - "methods": [ - "enable_plugin" - ] - }, - "GetPlugin": { - "methods": [ - "get_plugin" - ] - } - } - }, - "rest": { - "libraryClient": "ApiHubPluginClient", - "rpcs": { - "DisablePlugin": { - "methods": [ - "disable_plugin" - ] - }, - "EnablePlugin": { - "methods": [ - "enable_plugin" - ] - }, - "GetPlugin": { - "methods": [ - "get_plugin" - ] - } - } - } - } - }, - "HostProjectRegistrationService": { - "clients": { - "grpc": { - "libraryClient": "HostProjectRegistrationServiceClient", - "rpcs": { - "CreateHostProjectRegistration": { - "methods": [ - "create_host_project_registration" - ] - }, - "GetHostProjectRegistration": { - "methods": [ - "get_host_project_registration" - ] - }, - "ListHostProjectRegistrations": { - "methods": [ - "list_host_project_registrations" - ] - } - } - }, - "grpc-async": { - "libraryClient": "HostProjectRegistrationServiceAsyncClient", - "rpcs": { - "CreateHostProjectRegistration": { - "methods": [ - "create_host_project_registration" - ] - }, - "GetHostProjectRegistration": { - "methods": [ - "get_host_project_registration" - ] - }, - "ListHostProjectRegistrations": { - "methods": [ - "list_host_project_registrations" - ] - } - } - }, - "rest": { - "libraryClient": "HostProjectRegistrationServiceClient", - "rpcs": { - "CreateHostProjectRegistration": { - "methods": [ - "create_host_project_registration" - ] - }, - "GetHostProjectRegistration": { - "methods": [ - "get_host_project_registration" - ] - }, - "ListHostProjectRegistrations": { - "methods": [ - "list_host_project_registrations" - ] - } - } - } - } - }, - "LintingService": { - "clients": { - "grpc": { - "libraryClient": "LintingServiceClient", - "rpcs": { - "GetStyleGuide": { - "methods": [ - "get_style_guide" - ] - }, - "GetStyleGuideContents": { - "methods": [ - "get_style_guide_contents" - ] - }, - "LintSpec": { - "methods": [ - 
"lint_spec" - ] - }, - "UpdateStyleGuide": { - "methods": [ - "update_style_guide" - ] - } - } - }, - "grpc-async": { - "libraryClient": "LintingServiceAsyncClient", - "rpcs": { - "GetStyleGuide": { - "methods": [ - "get_style_guide" - ] - }, - "GetStyleGuideContents": { - "methods": [ - "get_style_guide_contents" - ] - }, - "LintSpec": { - "methods": [ - "lint_spec" - ] - }, - "UpdateStyleGuide": { - "methods": [ - "update_style_guide" - ] - } - } - }, - "rest": { - "libraryClient": "LintingServiceClient", - "rpcs": { - "GetStyleGuide": { - "methods": [ - "get_style_guide" - ] - }, - "GetStyleGuideContents": { - "methods": [ - "get_style_guide_contents" - ] - }, - "LintSpec": { - "methods": [ - "lint_spec" - ] - }, - "UpdateStyleGuide": { - "methods": [ - "update_style_guide" - ] - } - } - } - } - }, - "Provisioning": { - "clients": { - "grpc": { - "libraryClient": "ProvisioningClient", - "rpcs": { - "CreateApiHubInstance": { - "methods": [ - "create_api_hub_instance" - ] - }, - "GetApiHubInstance": { - "methods": [ - "get_api_hub_instance" - ] - }, - "LookupApiHubInstance": { - "methods": [ - "lookup_api_hub_instance" - ] - } - } - }, - "grpc-async": { - "libraryClient": "ProvisioningAsyncClient", - "rpcs": { - "CreateApiHubInstance": { - "methods": [ - "create_api_hub_instance" - ] - }, - "GetApiHubInstance": { - "methods": [ - "get_api_hub_instance" - ] - }, - "LookupApiHubInstance": { - "methods": [ - "lookup_api_hub_instance" - ] - } - } - }, - "rest": { - "libraryClient": "ProvisioningClient", - "rpcs": { - "CreateApiHubInstance": { - "methods": [ - "create_api_hub_instance" - ] - }, - "GetApiHubInstance": { - "methods": [ - "get_api_hub_instance" - ] - }, - "LookupApiHubInstance": { - "methods": [ - "lookup_api_hub_instance" + "lookup_api_hub_instance" ] } } @@ -916,66 +326,6 @@ }, "RuntimeProjectAttachmentService": { "clients": { - "grpc": { - "libraryClient": "RuntimeProjectAttachmentServiceClient", - "rpcs": { - "CreateRuntimeProjectAttachment": { - 
"methods": [ - "create_runtime_project_attachment" - ] - }, - "DeleteRuntimeProjectAttachment": { - "methods": [ - "delete_runtime_project_attachment" - ] - }, - "GetRuntimeProjectAttachment": { - "methods": [ - "get_runtime_project_attachment" - ] - }, - "ListRuntimeProjectAttachments": { - "methods": [ - "list_runtime_project_attachments" - ] - }, - "LookupRuntimeProjectAttachment": { - "methods": [ - "lookup_runtime_project_attachment" - ] - } - } - }, - "grpc-async": { - "libraryClient": "RuntimeProjectAttachmentServiceAsyncClient", - "rpcs": { - "CreateRuntimeProjectAttachment": { - "methods": [ - "create_runtime_project_attachment" - ] - }, - "DeleteRuntimeProjectAttachment": { - "methods": [ - "delete_runtime_project_attachment" - ] - }, - "GetRuntimeProjectAttachment": { - "methods": [ - "get_runtime_project_attachment" - ] - }, - "ListRuntimeProjectAttachments": { - "methods": [ - "list_runtime_project_attachments" - ] - }, - "LookupRuntimeProjectAttachment": { - "methods": [ - "lookup_runtime_project_attachment" - ] - } - } - }, "rest": { "libraryClient": "RuntimeProjectAttachmentServiceClient", "rpcs": { diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/__init__.py index 8da758214ef9..0d50a3548806 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import ApiHubAsyncClient from .client import ApiHubClient -__all__ = ( - "ApiHubClient", - "ApiHubAsyncClient", -) +__all__ = ("ApiHubClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py index 69f73e4792ec..77ddc5472962 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py @@ -57,8 +57,6 @@ from google.cloud.apihub_v1.types import apihub_service, common_fields from .transports.base import DEFAULT_CLIENT_INFO, ApiHubTransport -from .transports.grpc import ApiHubGrpcTransport -from .transports.grpc_asyncio import ApiHubGrpcAsyncIOTransport from .transports.rest import ApiHubRestTransport @@ -71,8 +69,6 @@ class ApiHubClientMeta(type): """ _transport_registry = OrderedDict() # type: Dict[str, Type[ApiHubTransport]] - _transport_registry["grpc"] = ApiHubGrpcTransport - _transport_registry["grpc_asyncio"] = ApiHubGrpcAsyncIOTransport _transport_registry["rest"] = ApiHubRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/pagers.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/pagers.py index e5b37bd42192..510192a2f321 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/pagers.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/pagers.py @@ -115,84 +115,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListApisAsyncPager: - """A pager for iterating through ``list_apis`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListApisResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``apis`` field. 
- - If there are more pages, the ``__aiter__`` method will make additional - ``ListApis`` requests and continue to iterate - through the ``apis`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListApisResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListApisResponse]], - request: apihub_service.ListApisRequest, - response: apihub_service.ListApisResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListApisRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListApisResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = apihub_service.ListApisRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListApisResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.Api]: - async def async_generator(): - async for page in self.pages: - for response in page.apis: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class ListVersionsPager: """A pager for iterating through ``list_versions`` requests. @@ -267,84 +189,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListVersionsAsyncPager: - """A pager for iterating through ``list_versions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListVersionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``versions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListVersions`` requests and continue to iterate - through the ``versions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListVersionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListVersionsResponse]], - request: apihub_service.ListVersionsRequest, - response: apihub_service.ListVersionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListVersionsRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListVersionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = apihub_service.ListVersionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListVersionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.Version]: - async def async_generator(): - async for page in self.pages: - for response in page.versions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class ListSpecsPager: """A pager for iterating through ``list_specs`` requests. 
@@ -419,84 +263,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListSpecsAsyncPager: - """A pager for iterating through ``list_specs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListSpecsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``specs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListSpecs`` requests and continue to iterate - through the ``specs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListSpecsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListSpecsResponse]], - request: apihub_service.ListSpecsRequest, - response: apihub_service.ListSpecsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListSpecsRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListSpecsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = apihub_service.ListSpecsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListSpecsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.Spec]: - async def async_generator(): - async for page in self.pages: - for response in page.specs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class ListApiOperationsPager: """A pager for iterating through ``list_api_operations`` requests. @@ -571,84 +337,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListApiOperationsAsyncPager: - """A pager for iterating through ``list_api_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListApiOperationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``api_operations`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListApiOperations`` requests and continue to iterate - through the ``api_operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListApiOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListApiOperationsResponse]], - request: apihub_service.ListApiOperationsRequest, - response: apihub_service.ListApiOperationsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListApiOperationsRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListApiOperationsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = apihub_service.ListApiOperationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListApiOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.ApiOperation]: - async def async_generator(): - async for page in self.pages: - for response in page.api_operations: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class ListDeploymentsPager: """A pager 
for iterating through ``list_deployments`` requests. @@ -723,84 +411,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListDeploymentsAsyncPager: - """A pager for iterating through ``list_deployments`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListDeploymentsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``deployments`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDeployments`` requests and continue to iterate - through the ``deployments`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListDeploymentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListDeploymentsResponse]], - request: apihub_service.ListDeploymentsRequest, - response: apihub_service.ListDeploymentsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListDeploymentsRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListDeploymentsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = apihub_service.ListDeploymentsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListDeploymentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.Deployment]: - async def async_generator(): - async for page in self.pages: - for response in page.deployments: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class ListAttributesPager: """A pager for iterating through ``list_attributes`` requests. @@ -875,84 +485,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListAttributesAsyncPager: - """A pager for iterating through ``list_attributes`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListAttributesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``attributes`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListAttributes`` requests and continue to iterate - through the ``attributes`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListAttributesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListAttributesResponse]], - request: apihub_service.ListAttributesRequest, - response: apihub_service.ListAttributesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListAttributesRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListAttributesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = apihub_service.ListAttributesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListAttributesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.Attribute]: - async def async_generator(): - async for page in self.pages: - for response in page.attributes: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class SearchResourcesPager: """A pager for iterating through 
``search_resources`` requests. @@ -1027,84 +559,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class SearchResourcesAsyncPager: - """A pager for iterating through ``search_resources`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.SearchResourcesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``search_results`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``SearchResources`` requests and continue to iterate - through the ``search_results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.SearchResourcesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.SearchResourcesResponse]], - request: apihub_service.SearchResourcesRequest, - response: apihub_service.SearchResourcesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.SearchResourcesRequest): - The initial request object. - response (google.cloud.apihub_v1.types.SearchResourcesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = apihub_service.SearchResourcesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.SearchResourcesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[apihub_service.SearchResult]: - async def async_generator(): - async for page in self.pages: - for response in page.search_results: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class ListExternalApisPager: """A pager for iterating through ``list_external_apis`` requests. @@ -1177,81 +631,3 @@ def __iter__(self) -> Iterator[common_fields.ExternalApi]: def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListExternalApisAsyncPager: - """A pager for iterating through ``list_external_apis`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListExternalApisResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``external_apis`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListExternalApis`` requests and continue to iterate - through the ``external_apis`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListExternalApisResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListExternalApisResponse]], - request: apihub_service.ListExternalApisRequest, - response: apihub_service.ListExternalApisResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListExternalApisRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListExternalApisResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = apihub_service.ListExternalApisRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListExternalApisResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.ExternalApi]: - async def async_generator(): - async for page in self.pages: - for response in page.external_apis: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git 
a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/__init__.py index ae6fa9e02afe..904125024a7b 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/__init__.py @@ -17,20 +17,14 @@ from typing import Dict, Type from .base import ApiHubTransport -from .grpc import ApiHubGrpcTransport -from .grpc_asyncio import ApiHubGrpcAsyncIOTransport from .rest import ApiHubRestInterceptor, ApiHubRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[ApiHubTransport]] -_transport_registry["grpc"] = ApiHubGrpcTransport -_transport_registry["grpc_asyncio"] = ApiHubGrpcAsyncIOTransport _transport_registry["rest"] = ApiHubRestTransport __all__ = ( "ApiHubTransport", - "ApiHubGrpcTransport", - "ApiHubGrpcAsyncIOTransport", "ApiHubRestTransport", "ApiHubRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/__init__.py index 9727d7d5b0d7..146b28fe4729 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import ApiHubDependenciesAsyncClient from .client import ApiHubDependenciesClient -__all__ = ( - "ApiHubDependenciesClient", - "ApiHubDependenciesAsyncClient", -) +__all__ = ("ApiHubDependenciesClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py index 1c70a2416c8e..70a952fe282c 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py @@ -57,8 +57,6 @@ from google.cloud.apihub_v1.types import apihub_service, common_fields from .transports.base import DEFAULT_CLIENT_INFO, ApiHubDependenciesTransport -from .transports.grpc import ApiHubDependenciesGrpcTransport -from .transports.grpc_asyncio import ApiHubDependenciesGrpcAsyncIOTransport from .transports.rest import ApiHubDependenciesRestTransport @@ -73,8 +71,6 @@ class ApiHubDependenciesClientMeta(type): _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[ApiHubDependenciesTransport]] - _transport_registry["grpc"] = ApiHubDependenciesGrpcTransport - _transport_registry["grpc_asyncio"] = ApiHubDependenciesGrpcAsyncIOTransport _transport_registry["rest"] = ApiHubDependenciesRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/pagers.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/pagers.py index 89cdfff15348..6f0fa634f84d 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/pagers.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/pagers.py @@ -113,81 +113,3 @@ def __iter__(self) -> Iterator[common_fields.Dependency]: def __repr__(self) -> str: return 
"{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListDependenciesAsyncPager: - """A pager for iterating through ``list_dependencies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListDependenciesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``dependencies`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDependencies`` requests and continue to iterate - through the ``dependencies`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListDependenciesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListDependenciesResponse]], - request: apihub_service.ListDependenciesRequest, - response: apihub_service.ListDependenciesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListDependenciesRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListDependenciesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = apihub_service.ListDependenciesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListDependenciesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.Dependency]: - async def async_generator(): - async for page in self.pages: - for response in page.dependencies: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/__init__.py index 8327d8408a2e..5de2b44a3808 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/__init__.py @@ -17,22 +17,16 @@ from typing import Dict, Type from .base import ApiHubDependenciesTransport -from .grpc import ApiHubDependenciesGrpcTransport -from .grpc_asyncio import ApiHubDependenciesGrpcAsyncIOTransport from .rest import ApiHubDependenciesRestInterceptor, ApiHubDependenciesRestTransport # Compile a registry of transports. 
_transport_registry = ( OrderedDict() ) # type: Dict[str, Type[ApiHubDependenciesTransport]] -_transport_registry["grpc"] = ApiHubDependenciesGrpcTransport -_transport_registry["grpc_asyncio"] = ApiHubDependenciesGrpcAsyncIOTransport _transport_registry["rest"] = ApiHubDependenciesRestTransport __all__ = ( "ApiHubDependenciesTransport", - "ApiHubDependenciesGrpcTransport", - "ApiHubDependenciesGrpcAsyncIOTransport", "ApiHubDependenciesRestTransport", "ApiHubDependenciesRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/__init__.py index 3d883e94c9c6..5cbea89992b0 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import ApiHubPluginAsyncClient from .client import ApiHubPluginClient -__all__ = ( - "ApiHubPluginClient", - "ApiHubPluginAsyncClient", -) +__all__ = ("ApiHubPluginClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py index 5f6283c74876..dbfedb9a41d0 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py @@ -54,8 +54,6 @@ from google.cloud.apihub_v1.types import common_fields, plugin_service from .transports.base import DEFAULT_CLIENT_INFO, ApiHubPluginTransport -from .transports.grpc import ApiHubPluginGrpcTransport -from .transports.grpc_asyncio import ApiHubPluginGrpcAsyncIOTransport from .transports.rest import ApiHubPluginRestTransport @@ -68,8 +66,6 @@ class ApiHubPluginClientMeta(type): """ _transport_registry = OrderedDict() # type: Dict[str, Type[ApiHubPluginTransport]] - _transport_registry["grpc"] = ApiHubPluginGrpcTransport - _transport_registry["grpc_asyncio"] = ApiHubPluginGrpcAsyncIOTransport _transport_registry["rest"] = ApiHubPluginRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/__init__.py index 33a3043c2375..9ecb3eaee613 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/__init__.py @@ -17,20 +17,14 @@ from typing import Dict, Type from .base import ApiHubPluginTransport -from .grpc import ApiHubPluginGrpcTransport -from .grpc_asyncio import ApiHubPluginGrpcAsyncIOTransport from .rest import 
ApiHubPluginRestInterceptor, ApiHubPluginRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[ApiHubPluginTransport]] -_transport_registry["grpc"] = ApiHubPluginGrpcTransport -_transport_registry["grpc_asyncio"] = ApiHubPluginGrpcAsyncIOTransport _transport_registry["rest"] = ApiHubPluginRestTransport __all__ = ( "ApiHubPluginTransport", - "ApiHubPluginGrpcTransport", - "ApiHubPluginGrpcAsyncIOTransport", "ApiHubPluginRestTransport", "ApiHubPluginRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/__init__.py index f08d02f0c7b6..f5f90e47cdb9 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import HostProjectRegistrationServiceAsyncClient from .client import HostProjectRegistrationServiceClient -__all__ = ( - "HostProjectRegistrationServiceClient", - "HostProjectRegistrationServiceAsyncClient", -) +__all__ = ("HostProjectRegistrationServiceClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py index 89784ae2dd03..2e5897cdad20 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py @@ -59,8 +59,6 @@ DEFAULT_CLIENT_INFO, HostProjectRegistrationServiceTransport, ) -from .transports.grpc import HostProjectRegistrationServiceGrpcTransport -from .transports.grpc_asyncio import HostProjectRegistrationServiceGrpcAsyncIOTransport from .transports.rest import HostProjectRegistrationServiceRestTransport @@ -75,10 +73,6 @@ class HostProjectRegistrationServiceClientMeta(type): _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[HostProjectRegistrationServiceTransport]] - _transport_registry["grpc"] = HostProjectRegistrationServiceGrpcTransport - _transport_registry[ - "grpc_asyncio" - ] = HostProjectRegistrationServiceGrpcAsyncIOTransport _transport_registry["rest"] = HostProjectRegistrationServiceRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/pagers.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/pagers.py index 2248115ec790..4bb7e2ec7541 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/pagers.py +++ 
b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/pagers.py @@ -125,96 +125,3 @@ def __iter__( def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListHostProjectRegistrationsAsyncPager: - """A pager for iterating through ``list_host_project_registrations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListHostProjectRegistrationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``host_project_registrations`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListHostProjectRegistrations`` requests and continue to iterate - through the ``host_project_registrations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListHostProjectRegistrationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[ - ..., - Awaitable[ - host_project_registration_service.ListHostProjectRegistrationsResponse - ], - ], - request: host_project_registration_service.ListHostProjectRegistrationsRequest, - response: host_project_registration_service.ListHostProjectRegistrationsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListHostProjectRegistrationsRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListHostProjectRegistrationsResponse): - The initial response object. 
- retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = ( - host_project_registration_service.ListHostProjectRegistrationsRequest( - request - ) - ) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages( - self, - ) -> AsyncIterator[ - host_project_registration_service.ListHostProjectRegistrationsResponse - ]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__( - self, - ) -> AsyncIterator[host_project_registration_service.HostProjectRegistration]: - async def async_generator(): - async for page in self.pages: - for response in page.host_project_registrations: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/__init__.py index 2352c478fef0..c80657406ff6 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/__init__.py @@ -17,8 +17,6 @@ from typing import Dict, Type from .base import 
HostProjectRegistrationServiceTransport -from .grpc import HostProjectRegistrationServiceGrpcTransport -from .grpc_asyncio import HostProjectRegistrationServiceGrpcAsyncIOTransport from .rest import ( HostProjectRegistrationServiceRestInterceptor, HostProjectRegistrationServiceRestTransport, @@ -28,14 +26,10 @@ _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[HostProjectRegistrationServiceTransport]] -_transport_registry["grpc"] = HostProjectRegistrationServiceGrpcTransport -_transport_registry["grpc_asyncio"] = HostProjectRegistrationServiceGrpcAsyncIOTransport _transport_registry["rest"] = HostProjectRegistrationServiceRestTransport __all__ = ( "HostProjectRegistrationServiceTransport", - "HostProjectRegistrationServiceGrpcTransport", - "HostProjectRegistrationServiceGrpcAsyncIOTransport", "HostProjectRegistrationServiceRestTransport", "HostProjectRegistrationServiceRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/__init__.py index 70436549c9b7..68f5fe54993b 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import LintingServiceAsyncClient from .client import LintingServiceClient -__all__ = ( - "LintingServiceClient", - "LintingServiceAsyncClient", -) +__all__ = ("LintingServiceClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py index 75930326f9d9..608153448c3b 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py @@ -55,8 +55,6 @@ from google.cloud.apihub_v1.types import common_fields, linting_service from .transports.base import DEFAULT_CLIENT_INFO, LintingServiceTransport -from .transports.grpc import LintingServiceGrpcTransport -from .transports.grpc_asyncio import LintingServiceGrpcAsyncIOTransport from .transports.rest import LintingServiceRestTransport @@ -71,8 +69,6 @@ class LintingServiceClientMeta(type): _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[LintingServiceTransport]] - _transport_registry["grpc"] = LintingServiceGrpcTransport - _transport_registry["grpc_asyncio"] = LintingServiceGrpcAsyncIOTransport _transport_registry["rest"] = LintingServiceRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/__init__.py index d9f3131d4481..f8d2f54aac8c 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/__init__.py @@ -17,20 +17,14 @@ from typing import Dict, Type from .base import LintingServiceTransport -from .grpc import LintingServiceGrpcTransport -from .grpc_asyncio import 
LintingServiceGrpcAsyncIOTransport from .rest import LintingServiceRestInterceptor, LintingServiceRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[LintingServiceTransport]] -_transport_registry["grpc"] = LintingServiceGrpcTransport -_transport_registry["grpc_asyncio"] = LintingServiceGrpcAsyncIOTransport _transport_registry["rest"] = LintingServiceRestTransport __all__ = ( "LintingServiceTransport", - "LintingServiceGrpcTransport", - "LintingServiceGrpcAsyncIOTransport", "LintingServiceRestTransport", "LintingServiceRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/__init__.py index 894b56bb9b82..3df245148ed6 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import ProvisioningAsyncClient from .client import ProvisioningClient -__all__ = ( - "ProvisioningClient", - "ProvisioningAsyncClient", -) +__all__ = ("ProvisioningClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py index c39738ccb878..56a83e91bd00 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py @@ -57,8 +57,6 @@ from google.cloud.apihub_v1.types import common_fields, provisioning_service from .transports.base import DEFAULT_CLIENT_INFO, ProvisioningTransport -from .transports.grpc import ProvisioningGrpcTransport -from .transports.grpc_asyncio import ProvisioningGrpcAsyncIOTransport from .transports.rest import ProvisioningRestTransport @@ -71,8 +69,6 @@ class ProvisioningClientMeta(type): """ _transport_registry = OrderedDict() # type: Dict[str, Type[ProvisioningTransport]] - _transport_registry["grpc"] = ProvisioningGrpcTransport - _transport_registry["grpc_asyncio"] = ProvisioningGrpcAsyncIOTransport _transport_registry["rest"] = ProvisioningRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/__init__.py index a1a997220440..c82beafe4a3e 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/__init__.py @@ -17,20 +17,14 @@ from typing import Dict, Type from .base import ProvisioningTransport -from .grpc import ProvisioningGrpcTransport -from .grpc_asyncio import ProvisioningGrpcAsyncIOTransport from .rest import ProvisioningRestInterceptor, 
ProvisioningRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[ProvisioningTransport]] -_transport_registry["grpc"] = ProvisioningGrpcTransport -_transport_registry["grpc_asyncio"] = ProvisioningGrpcAsyncIOTransport _transport_registry["rest"] = ProvisioningRestTransport __all__ = ( "ProvisioningTransport", - "ProvisioningGrpcTransport", - "ProvisioningGrpcAsyncIOTransport", "ProvisioningRestTransport", "ProvisioningRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/__init__.py index 53b21be76ec7..28875a7e7af2 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import RuntimeProjectAttachmentServiceAsyncClient from .client import RuntimeProjectAttachmentServiceClient -__all__ = ( - "RuntimeProjectAttachmentServiceClient", - "RuntimeProjectAttachmentServiceAsyncClient", -) +__all__ = ("RuntimeProjectAttachmentServiceClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py index 121ce0bf5470..dc9c1039381a 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py @@ -59,8 +59,6 @@ DEFAULT_CLIENT_INFO, RuntimeProjectAttachmentServiceTransport, ) -from .transports.grpc import RuntimeProjectAttachmentServiceGrpcTransport -from .transports.grpc_asyncio import RuntimeProjectAttachmentServiceGrpcAsyncIOTransport from .transports.rest import RuntimeProjectAttachmentServiceRestTransport @@ -75,10 +73,6 @@ class RuntimeProjectAttachmentServiceClientMeta(type): _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[RuntimeProjectAttachmentServiceTransport]] - _transport_registry["grpc"] = RuntimeProjectAttachmentServiceGrpcTransport - _transport_registry[ - "grpc_asyncio" - ] = RuntimeProjectAttachmentServiceGrpcAsyncIOTransport _transport_registry["rest"] = RuntimeProjectAttachmentServiceRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/pagers.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/pagers.py index 05ba311342e2..7e63e765df51 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/pagers.py +++ 
b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/pagers.py @@ -126,96 +126,3 @@ def __iter__( def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListRuntimeProjectAttachmentsAsyncPager: - """A pager for iterating through ``list_runtime_project_attachments`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``runtime_project_attachments`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListRuntimeProjectAttachments`` requests and continue to iterate - through the ``runtime_project_attachments`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[ - ..., - Awaitable[ - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse - ], - ], - request: runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, - response: runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsResponse): - The initial response object. 
- retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest( - request - ) - ) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages( - self, - ) -> AsyncIterator[ - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse - ]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__( - self, - ) -> AsyncIterator[runtime_project_attachment_service.RuntimeProjectAttachment]: - async def async_generator(): - async for page in self.pages: - for response in page.runtime_project_attachments: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/__init__.py index e7fe76d5503e..604d33074e46 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/__init__.py @@ -17,8 +17,6 @@ from typing import Dict, Type from .base import 
RuntimeProjectAttachmentServiceTransport -from .grpc import RuntimeProjectAttachmentServiceGrpcTransport -from .grpc_asyncio import RuntimeProjectAttachmentServiceGrpcAsyncIOTransport from .rest import ( RuntimeProjectAttachmentServiceRestInterceptor, RuntimeProjectAttachmentServiceRestTransport, @@ -28,16 +26,10 @@ _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[RuntimeProjectAttachmentServiceTransport]] -_transport_registry["grpc"] = RuntimeProjectAttachmentServiceGrpcTransport -_transport_registry[ - "grpc_asyncio" -] = RuntimeProjectAttachmentServiceGrpcAsyncIOTransport _transport_registry["rest"] = RuntimeProjectAttachmentServiceRestTransport __all__ = ( "RuntimeProjectAttachmentServiceTransport", - "RuntimeProjectAttachmentServiceGrpcTransport", - "RuntimeProjectAttachmentServiceGrpcAsyncIOTransport", "RuntimeProjectAttachmentServiceRestTransport", "RuntimeProjectAttachmentServiceRestInterceptor", ) diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_api_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_api_async.py deleted file mode 100644 index 433fc9066d3e..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_api_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for CreateApi -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_CreateApi_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_api(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - api = apihub_v1.Api() - api.display_name = "display_name_value" - - request = apihub_v1.CreateApiRequest( - parent="parent_value", - api=api, - ) - - # Make the request - response = await client.create_api(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_CreateApi_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_attribute_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_attribute_async.py deleted file mode 100644 index 34242e5e4a77..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_attribute_async.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateAttribute -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_CreateAttribute_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_attribute(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - attribute = apihub_v1.Attribute() - attribute.display_name = "display_name_value" - attribute.scope = "PLUGIN" - attribute.data_type = "STRING" - - request = apihub_v1.CreateAttributeRequest( - parent="parent_value", - attribute=attribute, - ) - - # Make the request - response = await client.create_attribute(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_CreateAttribute_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_deployment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_deployment_async.py deleted file mode 100644 index 764524b6c030..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_deployment_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDeployment -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_CreateDeployment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_deployment(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - deployment = apihub_v1.Deployment() - deployment.display_name = "display_name_value" - deployment.deployment_type.enum_values.values.id = "id_value" - deployment.deployment_type.enum_values.values.display_name = "display_name_value" - deployment.resource_uri = "resource_uri_value" - deployment.endpoints = ['endpoints_value1', 'endpoints_value2'] - - request = apihub_v1.CreateDeploymentRequest( - parent="parent_value", - deployment=deployment, - ) - - # Make the request - response = await client.create_deployment(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_CreateDeployment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_external_api_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_external_api_async.py deleted file mode 100644 index a9cff017ea20..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_external_api_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache 
License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateExternalApi -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_CreateExternalApi_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_external_api(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - external_api = apihub_v1.ExternalApi() - external_api.display_name = "display_name_value" - - request = apihub_v1.CreateExternalApiRequest( - parent="parent_value", - external_api=external_api, - ) - - # Make the request - response = await client.create_external_api(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_CreateExternalApi_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_spec_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_spec_async.py deleted file mode 100644 index 90481d22eee0..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_spec_async.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_CreateSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_spec(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - spec = apihub_v1.Spec() - spec.display_name = "display_name_value" - spec.spec_type.enum_values.values.id = "id_value" - spec.spec_type.enum_values.values.display_name = "display_name_value" - - request = apihub_v1.CreateSpecRequest( - parent="parent_value", - spec=spec, - ) - - # Make the request - response = await client.create_spec(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_CreateSpec_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_version_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_version_async.py deleted file mode 100644 index 3ba50ef85c3e..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_version_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateVersion -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_CreateVersion_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_version(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - version = apihub_v1.Version() - version.display_name = "display_name_value" - - request = apihub_v1.CreateVersionRequest( - parent="parent_value", - version=version, - ) - - # Make the request - response = await client.create_version(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_CreateVersion_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_api_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_api_async.py deleted file mode 100644 index 9dcedc8d971f..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_api_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteApi -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_DeleteApi_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_api(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteApiRequest( - name="name_value", - ) - - # Make the request - await client.delete_api(request=request) - - -# [END apihub_v1_generated_ApiHub_DeleteApi_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_attribute_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_attribute_async.py deleted file mode 100644 index 3c139efd73c2..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_attribute_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. 
DO NOT EDIT! -# -# Snippet for DeleteAttribute -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_DeleteAttribute_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_attribute(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteAttributeRequest( - name="name_value", - ) - - # Make the request - await client.delete_attribute(request=request) - - -# [END apihub_v1_generated_ApiHub_DeleteAttribute_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_deployment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_deployment_async.py deleted file mode 100644 index c187acdcc75e..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_deployment_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDeployment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_DeleteDeployment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_deployment(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteDeploymentRequest( - name="name_value", - ) - - # Make the request - await client.delete_deployment(request=request) - - -# [END apihub_v1_generated_ApiHub_DeleteDeployment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_external_api_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_external_api_async.py deleted file mode 100644 index ee72bfdb2cb5..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_external_api_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteExternalApi -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_DeleteExternalApi_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_external_api(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteExternalApiRequest( - name="name_value", - ) - - # Make the request - await client.delete_external_api(request=request) - - -# [END apihub_v1_generated_ApiHub_DeleteExternalApi_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_spec_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_spec_async.py deleted file mode 100644 index 18bf9aa527da..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_spec_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_DeleteSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_spec(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteSpecRequest( - name="name_value", - ) - - # Make the request - await client.delete_spec(request=request) - - -# [END apihub_v1_generated_ApiHub_DeleteSpec_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_version_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_version_async.py deleted file mode 100644 index a84532d3859c..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_version_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteVersion -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_DeleteVersion_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_version(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteVersionRequest( - name="name_value", - ) - - # Make the request - await client.delete_version(request=request) - - -# [END apihub_v1_generated_ApiHub_DeleteVersion_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_create_dependency_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_create_dependency_async.py deleted file mode 100644 index bc37ee587379..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_create_dependency_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDependency -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubDependencies_CreateDependency_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_dependency(): - # Create a client - client = apihub_v1.ApiHubDependenciesAsyncClient() - - # Initialize request argument(s) - dependency = apihub_v1.Dependency() - dependency.consumer.operation_resource_name = "operation_resource_name_value" - dependency.supplier.operation_resource_name = "operation_resource_name_value" - - request = apihub_v1.CreateDependencyRequest( - parent="parent_value", - dependency=dependency, - ) - - # Make the request - response = await client.create_dependency(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHubDependencies_CreateDependency_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_delete_dependency_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_delete_dependency_async.py deleted file mode 100644 index af78f08fa7cd..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_delete_dependency_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDependency -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubDependencies_DeleteDependency_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_dependency(): - # Create a client - client = apihub_v1.ApiHubDependenciesAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteDependencyRequest( - name="name_value", - ) - - # Make the request - await client.delete_dependency(request=request) - - -# [END apihub_v1_generated_ApiHubDependencies_DeleteDependency_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_get_dependency_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_get_dependency_async.py deleted file mode 100644 index 1ef59fb63cc2..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_get_dependency_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDependency -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubDependencies_GetDependency_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_dependency(): - # Create a client - client = apihub_v1.ApiHubDependenciesAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetDependencyRequest( - name="name_value", - ) - - # Make the request - response = await client.get_dependency(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHubDependencies_GetDependency_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_list_dependencies_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_list_dependencies_async.py deleted file mode 100644 index cbac9ee03c0e..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_list_dependencies_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDependencies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubDependencies_ListDependencies_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_dependencies(): - # Create a client - client = apihub_v1.ApiHubDependenciesAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListDependenciesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_dependencies(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHubDependencies_ListDependencies_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_update_dependency_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_update_dependency_async.py deleted file mode 100644 index 37b5b624db53..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_update_dependency_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDependency -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubDependencies_UpdateDependency_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_dependency(): - # Create a client - client = apihub_v1.ApiHubDependenciesAsyncClient() - - # Initialize request argument(s) - dependency = apihub_v1.Dependency() - dependency.consumer.operation_resource_name = "operation_resource_name_value" - dependency.supplier.operation_resource_name = "operation_resource_name_value" - - request = apihub_v1.UpdateDependencyRequest( - dependency=dependency, - ) - - # Make the request - response = await client.update_dependency(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHubDependencies_UpdateDependency_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_async.py deleted file mode 100644 index d9a93e6ec082..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetApi -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetApi_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_api(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetApiRequest( - name="name_value", - ) - - # Make the request - response = await client.get_api(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetApi_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_operation_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_operation_async.py deleted file mode 100644 index 884fa8a495b2..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_operation_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetApiOperation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetApiOperation_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_api_operation(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetApiOperationRequest( - name="name_value", - ) - - # Make the request - response = await client.get_api_operation(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetApiOperation_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_attribute_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_attribute_async.py deleted file mode 100644 index 8de6d7766e83..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_attribute_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetAttribute -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetAttribute_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_attribute(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetAttributeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_attribute(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetAttribute_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_definition_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_definition_async.py deleted file mode 100644 index b1b62ba74864..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_definition_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDefinition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetDefinition_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_definition(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetDefinitionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_definition(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetDefinition_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_deployment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_deployment_async.py deleted file mode 100644 index a16639adc526..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_deployment_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDeployment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetDeployment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_deployment(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetDeploymentRequest( - name="name_value", - ) - - # Make the request - response = await client.get_deployment(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetDeployment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_external_api_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_external_api_async.py deleted file mode 100644 index 2e44849e8c40..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_external_api_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetExternalApi -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetExternalApi_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_external_api(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetExternalApiRequest( - name="name_value", - ) - - # Make the request - response = await client.get_external_api(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetExternalApi_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_async.py deleted file mode 100644 index 5679acd29ccd..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_spec(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetSpecRequest( - name="name_value", - ) - - # Make the request - response = await client.get_spec(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetSpec_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_contents_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_contents_async.py deleted file mode 100644 index ffaf9eaa238b..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_contents_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetSpecContents -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetSpecContents_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_spec_contents(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetSpecContentsRequest( - name="name_value", - ) - - # Make the request - response = await client.get_spec_contents(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetSpecContents_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_version_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_version_async.py deleted file mode 100644 index 2c7b1d375fee..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_version_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetVersion -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetVersion_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_version(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetVersionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_version(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetVersion_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_api_operations_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_api_operations_async.py deleted file mode 100644 index 341caf033a8a..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_api_operations_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListApiOperations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_ListApiOperations_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_api_operations(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListApiOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_api_operations(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHub_ListApiOperations_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_apis_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_apis_async.py deleted file mode 100644 index 5130bd9cbca6..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_apis_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListApis -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_ListApis_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_apis(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListApisRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_apis(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHub_ListApis_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_attributes_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_attributes_async.py deleted file mode 100644 index 9dd769c3831b..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_attributes_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListAttributes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_ListAttributes_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_attributes(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListAttributesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_attributes(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHub_ListAttributes_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_deployments_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_deployments_async.py deleted file mode 100644 index 7d96c577933f..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_deployments_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDeployments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_ListDeployments_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_deployments(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListDeploymentsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_deployments(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHub_ListDeployments_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_external_apis_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_external_apis_async.py deleted file mode 100644 index d45eefcb5fa6..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_external_apis_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListExternalApis -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_ListExternalApis_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_external_apis(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListExternalApisRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_external_apis(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHub_ListExternalApis_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_specs_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_specs_async.py deleted file mode 100644 index 33e365b5ebaa..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_specs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListSpecs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_ListSpecs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_specs(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListSpecsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_specs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHub_ListSpecs_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_versions_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_versions_async.py deleted file mode 100644 index 795e376e27cf..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_versions_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListVersions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_ListVersions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_versions(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListVersionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_versions(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHub_ListVersions_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_disable_plugin_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_disable_plugin_async.py deleted file mode 100644 index 5c1773b84f6a..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_disable_plugin_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DisablePlugin -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubPlugin_DisablePlugin_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_disable_plugin(): - # Create a client - client = apihub_v1.ApiHubPluginAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DisablePluginRequest( - name="name_value", - ) - - # Make the request - response = await client.disable_plugin(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHubPlugin_DisablePlugin_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_enable_plugin_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_enable_plugin_async.py deleted file mode 100644 index c2dbee38cd0b..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_enable_plugin_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for EnablePlugin -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubPlugin_EnablePlugin_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_enable_plugin(): - # Create a client - client = apihub_v1.ApiHubPluginAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.EnablePluginRequest( - name="name_value", - ) - - # Make the request - response = await client.enable_plugin(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHubPlugin_EnablePlugin_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_get_plugin_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_get_plugin_async.py deleted file mode 100644 index 8ced7daffafe..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_get_plugin_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetPlugin -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubPlugin_GetPlugin_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_plugin(): - # Create a client - client = apihub_v1.ApiHubPluginAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetPluginRequest( - name="name_value", - ) - - # Make the request - response = await client.get_plugin(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHubPlugin_GetPlugin_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_search_resources_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_search_resources_async.py deleted file mode 100644 index 538e2cee6af0..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_search_resources_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchResources -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_SearchResources_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_search_resources(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.SearchResourcesRequest( - location="location_value", - query="query_value", - ) - - # Make the request - page_result = client.search_resources(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHub_SearchResources_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_api_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_api_async.py deleted file mode 100644 index 4ba551de8127..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_api_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateApi -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_UpdateApi_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_api(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - api = apihub_v1.Api() - api.display_name = "display_name_value" - - request = apihub_v1.UpdateApiRequest( - api=api, - ) - - # Make the request - response = await client.update_api(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_UpdateApi_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_attribute_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_attribute_async.py deleted file mode 100644 index b0583bcadadb..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_attribute_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateAttribute -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_UpdateAttribute_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_attribute(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - attribute = apihub_v1.Attribute() - attribute.display_name = "display_name_value" - attribute.scope = "PLUGIN" - attribute.data_type = "STRING" - - request = apihub_v1.UpdateAttributeRequest( - attribute=attribute, - ) - - # Make the request - response = await client.update_attribute(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_UpdateAttribute_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_deployment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_deployment_async.py deleted file mode 100644 index 2624bd2d844a..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_deployment_async.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDeployment -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_UpdateDeployment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_deployment(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - deployment = apihub_v1.Deployment() - deployment.display_name = "display_name_value" - deployment.deployment_type.enum_values.values.id = "id_value" - deployment.deployment_type.enum_values.values.display_name = "display_name_value" - deployment.resource_uri = "resource_uri_value" - deployment.endpoints = ['endpoints_value1', 'endpoints_value2'] - - request = apihub_v1.UpdateDeploymentRequest( - deployment=deployment, - ) - - # Make the request - response = await client.update_deployment(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_UpdateDeployment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_external_api_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_external_api_async.py deleted file mode 100644 index d4eb7321ecf4..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_external_api_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the 
"License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateExternalApi -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_UpdateExternalApi_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_external_api(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - external_api = apihub_v1.ExternalApi() - external_api.display_name = "display_name_value" - - request = apihub_v1.UpdateExternalApiRequest( - external_api=external_api, - ) - - # Make the request - response = await client.update_external_api(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_UpdateExternalApi_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_spec_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_spec_async.py deleted file mode 100644 index 62e5c6bef8c2..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_spec_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_UpdateSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_spec(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - spec = apihub_v1.Spec() - spec.display_name = "display_name_value" - spec.spec_type.enum_values.values.id = "id_value" - spec.spec_type.enum_values.values.display_name = "display_name_value" - - request = apihub_v1.UpdateSpecRequest( - spec=spec, - ) - - # Make the request - response = await client.update_spec(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_UpdateSpec_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_version_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_version_async.py deleted file mode 100644 index 37369a1057bc..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_version_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateVersion -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_UpdateVersion_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_version(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - version = apihub_v1.Version() - version.display_name = "display_name_value" - - request = apihub_v1.UpdateVersionRequest( - version=version, - ) - - # Make the request - response = await client.update_version(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_UpdateVersion_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_create_host_project_registration_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_create_host_project_registration_async.py deleted file mode 100644 index 1f3b26540e0c..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_create_host_project_registration_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for CreateHostProjectRegistration -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_HostProjectRegistrationService_CreateHostProjectRegistration_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_host_project_registration(): - # Create a client - client = apihub_v1.HostProjectRegistrationServiceAsyncClient() - - # Initialize request argument(s) - host_project_registration = apihub_v1.HostProjectRegistration() - host_project_registration.gcp_project = "gcp_project_value" - - request = apihub_v1.CreateHostProjectRegistrationRequest( - parent="parent_value", - host_project_registration_id="host_project_registration_id_value", - host_project_registration=host_project_registration, - ) - - # Make the request - response = await client.create_host_project_registration(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_HostProjectRegistrationService_CreateHostProjectRegistration_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_get_host_project_registration_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_get_host_project_registration_async.py deleted file mode 100644 index b48f7c69b98c..000000000000 --- 
a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_get_host_project_registration_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetHostProjectRegistration -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_HostProjectRegistrationService_GetHostProjectRegistration_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_host_project_registration(): - # Create a client - client = apihub_v1.HostProjectRegistrationServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetHostProjectRegistrationRequest( - name="name_value", - ) - - # Make the request - response = await client.get_host_project_registration(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_HostProjectRegistrationService_GetHostProjectRegistration_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_list_host_project_registrations_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_list_host_project_registrations_async.py deleted file mode 100644 index 0796d381861a..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_list_host_project_registrations_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for ListHostProjectRegistrations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_HostProjectRegistrationService_ListHostProjectRegistrations_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_host_project_registrations(): - # Create a client - client = apihub_v1.HostProjectRegistrationServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListHostProjectRegistrationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_host_project_registrations(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_HostProjectRegistrationService_ListHostProjectRegistrations_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_async.py deleted file mode 100644 index d6a18ab4d88e..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file 
except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetStyleGuide -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_LintingService_GetStyleGuide_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_style_guide(): - # Create a client - client = apihub_v1.LintingServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetStyleGuideRequest( - name="name_value", - ) - - # Make the request - response = await client.get_style_guide(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_LintingService_GetStyleGuide_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_contents_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_contents_async.py deleted file mode 100644 index 9463dc7ebbfd..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_contents_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetStyleGuideContents -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_LintingService_GetStyleGuideContents_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_style_guide_contents(): - # Create a client - client = apihub_v1.LintingServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetStyleGuideContentsRequest( - name="name_value", - ) - - # Make the request - response = await client.get_style_guide_contents(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_LintingService_GetStyleGuideContents_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_lint_spec_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_lint_spec_async.py deleted file mode 100644 index 03effe93eca4..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_lint_spec_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for LintSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_LintingService_LintSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_lint_spec(): - # Create a client - client = apihub_v1.LintingServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.LintSpecRequest( - name="name_value", - ) - - # Make the request - await client.lint_spec(request=request) - - -# [END apihub_v1_generated_LintingService_LintSpec_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_update_style_guide_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_update_style_guide_async.py deleted file mode 100644 index 3b819e06d8ba..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_update_style_guide_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateStyleGuide -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_LintingService_UpdateStyleGuide_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_style_guide(): - # Create a client - client = apihub_v1.LintingServiceAsyncClient() - - # Initialize request argument(s) - style_guide = apihub_v1.StyleGuide() - style_guide.linter = "OTHER" - style_guide.contents.contents = b'contents_blob' - style_guide.contents.mime_type = "mime_type_value" - - request = apihub_v1.UpdateStyleGuideRequest( - style_guide=style_guide, - ) - - # Make the request - response = await client.update_style_guide(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_LintingService_UpdateStyleGuide_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_create_api_hub_instance_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_create_api_hub_instance_async.py deleted file mode 100644 index abb888a77b21..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_create_api_hub_instance_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateApiHubInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_Provisioning_CreateApiHubInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_api_hub_instance(): - # Create a client - client = apihub_v1.ProvisioningAsyncClient() - - # Initialize request argument(s) - api_hub_instance = apihub_v1.ApiHubInstance() - api_hub_instance.config.cmek_key_name = "cmek_key_name_value" - - request = apihub_v1.CreateApiHubInstanceRequest( - parent="parent_value", - api_hub_instance=api_hub_instance, - ) - - # Make the request - operation = client.create_api_hub_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END apihub_v1_generated_Provisioning_CreateApiHubInstance_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_get_api_hub_instance_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_get_api_hub_instance_async.py deleted file mode 100644 index adeb99e46bf6..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_get_api_hub_instance_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for GetApiHubInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_Provisioning_GetApiHubInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_api_hub_instance(): - # Create a client - client = apihub_v1.ProvisioningAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetApiHubInstanceRequest( - name="name_value", - ) - - # Make the request - response = await client.get_api_hub_instance(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_Provisioning_GetApiHubInstance_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_lookup_api_hub_instance_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_lookup_api_hub_instance_async.py deleted file mode 100644 index 6fa9bb31c0d4..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_lookup_api_hub_instance_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for LookupApiHubInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_Provisioning_LookupApiHubInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_lookup_api_hub_instance(): - # Create a client - client = apihub_v1.ProvisioningAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.LookupApiHubInstanceRequest( - parent="parent_value", - ) - - # Make the request - response = await client.lookup_api_hub_instance(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_Provisioning_LookupApiHubInstance_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_async.py deleted file mode 100644 index 41874f8f4024..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateRuntimeProjectAttachment -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_RuntimeProjectAttachmentService_CreateRuntimeProjectAttachment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_runtime_project_attachment(): - # Create a client - client = apihub_v1.RuntimeProjectAttachmentServiceAsyncClient() - - # Initialize request argument(s) - runtime_project_attachment = apihub_v1.RuntimeProjectAttachment() - runtime_project_attachment.runtime_project = "runtime_project_value" - - request = apihub_v1.CreateRuntimeProjectAttachmentRequest( - parent="parent_value", - runtime_project_attachment_id="runtime_project_attachment_id_value", - runtime_project_attachment=runtime_project_attachment, - ) - - # Make the request - response = await client.create_runtime_project_attachment(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_RuntimeProjectAttachmentService_CreateRuntimeProjectAttachment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_async.py deleted file mode 100644 index c6d606366236..000000000000 --- 
a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteRuntimeProjectAttachment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_RuntimeProjectAttachmentService_DeleteRuntimeProjectAttachment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_runtime_project_attachment(): - # Create a client - client = apihub_v1.RuntimeProjectAttachmentServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteRuntimeProjectAttachmentRequest( - name="name_value", - ) - - # Make the request - await client.delete_runtime_project_attachment(request=request) - - -# [END apihub_v1_generated_RuntimeProjectAttachmentService_DeleteRuntimeProjectAttachment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_async.py deleted file mode 100644 index f174d75570c7..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for GetRuntimeProjectAttachment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_RuntimeProjectAttachmentService_GetRuntimeProjectAttachment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_runtime_project_attachment(): - # Create a client - client = apihub_v1.RuntimeProjectAttachmentServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetRuntimeProjectAttachmentRequest( - name="name_value", - ) - - # Make the request - response = await client.get_runtime_project_attachment(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_RuntimeProjectAttachmentService_GetRuntimeProjectAttachment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_async.py deleted file mode 100644 index 82f990e50294..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache 
License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListRuntimeProjectAttachments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_RuntimeProjectAttachmentService_ListRuntimeProjectAttachments_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_runtime_project_attachments(): - # Create a client - client = apihub_v1.RuntimeProjectAttachmentServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListRuntimeProjectAttachmentsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_runtime_project_attachments(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_RuntimeProjectAttachmentService_ListRuntimeProjectAttachments_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_async.py deleted file mode 100644 index 342d51e39899..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for LookupRuntimeProjectAttachment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_RuntimeProjectAttachmentService_LookupRuntimeProjectAttachment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_lookup_runtime_project_attachment(): - # Create a client - client = apihub_v1.RuntimeProjectAttachmentServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.LookupRuntimeProjectAttachmentRequest( - name="name_value", - ) - - # Make the request - response = await client.lookup_runtime_project_attachment(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_RuntimeProjectAttachmentService_LookupRuntimeProjectAttachment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json b/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json index 26cd3b4e3072..475a2011ac20 100644 --- a/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json +++ b/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json @@ -14,12 +14,11 @@ { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient", - 
"shortName": "ApiHubDependenciesAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", + "shortName": "ApiHubDependenciesClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient.create_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.create_dependency", "method": { "fullName": "google.cloud.apihub.v1.ApiHubDependencies.CreateDependency", "service": { @@ -62,10 +61,10 @@ "shortName": "create_dependency" }, "description": "Sample for CreateDependency", - "file": "apihub_v1_generated_api_hub_dependencies_create_dependency_async.py", + "file": "apihub_v1_generated_api_hub_dependencies_create_dependency_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_CreateDependency_async", + "regionTag": "apihub_v1_generated_ApiHubDependencies_CreateDependency_sync", "segments": [ { "end": 56, @@ -98,7 +97,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_create_dependency_async.py" + "title": "apihub_v1_generated_api_hub_dependencies_create_dependency_sync.py" }, { "canonical": true, @@ -107,30 +106,22 @@ "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", "shortName": "ApiHubDependenciesClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.create_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.delete_dependency", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.CreateDependency", + "fullName": "google.cloud.apihub.v1.ApiHubDependencies.DeleteDependency", "service": { "fullName": "google.cloud.apihub.v1.ApiHubDependencies", "shortName": "ApiHubDependencies" }, - "shortName": "CreateDependency" + "shortName": "DeleteDependency" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateDependencyRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "dependency", - "type": 
"google.cloud.apihub_v1.types.Dependency" + "type": "google.cloud.apihub_v1.types.DeleteDependencyRequest" }, { - "name": "dependency_id", + "name": "name", "type": "str" }, { @@ -146,22 +137,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Dependency", - "shortName": "create_dependency" + "shortName": "delete_dependency" }, - "description": "Sample for CreateDependency", - "file": "apihub_v1_generated_api_hub_dependencies_create_dependency_sync.py", + "description": "Sample for DeleteDependency", + "file": "apihub_v1_generated_api_hub_dependencies_delete_dependency_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_CreateDependency_sync", + "regionTag": "apihub_v1_generated_ApiHubDependencies_DeleteDependency_sync", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -171,44 +161,41 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_create_dependency_sync.py" + "title": "apihub_v1_generated_api_hub_dependencies_delete_dependency_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient", - "shortName": "ApiHubDependenciesAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", + "shortName": "ApiHubDependenciesClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient.delete_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.get_dependency", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.DeleteDependency", + "fullName": 
"google.cloud.apihub.v1.ApiHubDependencies.GetDependency", "service": { "fullName": "google.cloud.apihub.v1.ApiHubDependencies", "shortName": "ApiHubDependencies" }, - "shortName": "DeleteDependency" + "shortName": "GetDependency" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteDependencyRequest" + "type": "google.cloud.apihub_v1.types.GetDependencyRequest" }, { "name": "name", @@ -227,21 +214,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_dependency" + "resultType": "google.cloud.apihub_v1.types.Dependency", + "shortName": "get_dependency" }, - "description": "Sample for DeleteDependency", - "file": "apihub_v1_generated_api_hub_dependencies_delete_dependency_async.py", + "description": "Sample for GetDependency", + "file": "apihub_v1_generated_api_hub_dependencies_get_dependency_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_DeleteDependency_async", + "regionTag": "apihub_v1_generated_ApiHubDependencies_GetDependency_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -256,15 +244,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_delete_dependency_async.py" + "title": "apihub_v1_generated_api_hub_dependencies_get_dependency_sync.py" }, { "canonical": true, @@ -273,22 +263,22 @@ "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", "shortName": "ApiHubDependenciesClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.delete_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.list_dependencies", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.DeleteDependency", + "fullName": 
"google.cloud.apihub.v1.ApiHubDependencies.ListDependencies", "service": { "fullName": "google.cloud.apihub.v1.ApiHubDependencies", "shortName": "ApiHubDependencies" }, - "shortName": "DeleteDependency" + "shortName": "ListDependencies" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteDependencyRequest" + "type": "google.cloud.apihub_v1.types.ListDependenciesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -304,21 +294,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_dependency" + "resultType": "google.cloud.apihub_v1.services.api_hub_dependencies.pagers.ListDependenciesPager", + "shortName": "list_dependencies" }, - "description": "Sample for DeleteDependency", - "file": "apihub_v1_generated_api_hub_dependencies_delete_dependency_sync.py", + "description": "Sample for ListDependencies", + "file": "apihub_v1_generated_api_hub_dependencies_list_dependencies_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_DeleteDependency_sync", + "regionTag": "apihub_v1_generated_ApiHubDependencies_ListDependencies_sync", "segments": [ { - "end": 49, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 52, "start": 27, "type": "SHORT" }, @@ -333,41 +324,46 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_delete_dependency_sync.py" + "title": "apihub_v1_generated_api_hub_dependencies_list_dependencies_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient", - "shortName": "ApiHubDependenciesAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", + "shortName": "ApiHubDependenciesClient" }, - "fullName": 
"google.cloud.apihub_v1.ApiHubDependenciesAsyncClient.get_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.update_dependency", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.GetDependency", + "fullName": "google.cloud.apihub.v1.ApiHubDependencies.UpdateDependency", "service": { "fullName": "google.cloud.apihub.v1.ApiHubDependencies", "shortName": "ApiHubDependencies" }, - "shortName": "GetDependency" + "shortName": "UpdateDependency" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetDependencyRequest" + "type": "google.cloud.apihub_v1.types.UpdateDependencyRequest" }, { - "name": "name", - "type": "str" + "name": "dependency", + "type": "google.cloud.apihub_v1.types.Dependency" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -383,21 +379,21 @@ } ], "resultType": "google.cloud.apihub_v1.types.Dependency", - "shortName": "get_dependency" + "shortName": "update_dependency" }, - "description": "Sample for GetDependency", - "file": "apihub_v1_generated_api_hub_dependencies_get_dependency_async.py", + "description": "Sample for UpdateDependency", + "file": "apihub_v1_generated_api_hub_dependencies_update_dependency_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_GetDependency_async", + "regionTag": "apihub_v1_generated_ApiHubDependencies_UpdateDependency_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -407,43 +403,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": 
"apihub_v1_generated_api_hub_dependencies_get_dependency_async.py" + "title": "apihub_v1_generated_api_hub_dependencies_update_dependency_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", - "shortName": "ApiHubDependenciesClient" + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", + "shortName": "ApiHubPluginClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.get_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.disable_plugin", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.GetDependency", + "fullName": "google.cloud.apihub.v1.ApiHubPlugin.DisablePlugin", "service": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies", - "shortName": "ApiHubDependencies" + "fullName": "google.cloud.apihub.v1.ApiHubPlugin", + "shortName": "ApiHubPlugin" }, - "shortName": "GetDependency" + "shortName": "DisablePlugin" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetDependencyRequest" + "type": "google.cloud.apihub_v1.types.DisablePluginRequest" }, { "name": "name", @@ -462,14 +458,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Dependency", - "shortName": "get_dependency" + "resultType": "google.cloud.apihub_v1.types.Plugin", + "shortName": "disable_plugin" }, - "description": "Sample for GetDependency", - "file": "apihub_v1_generated_api_hub_dependencies_get_dependency_sync.py", + "description": "Sample for DisablePlugin", + "file": "apihub_v1_generated_api_hub_plugin_disable_plugin_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_GetDependency_sync", + "regionTag": "apihub_v1_generated_ApiHubPlugin_DisablePlugin_sync", "segments": [ { "end": 51, @@ -502,32 +498,31 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_get_dependency_sync.py" + "title": 
"apihub_v1_generated_api_hub_plugin_disable_plugin_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient", - "shortName": "ApiHubDependenciesAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", + "shortName": "ApiHubPluginClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient.list_dependencies", + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.enable_plugin", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.ListDependencies", + "fullName": "google.cloud.apihub.v1.ApiHubPlugin.EnablePlugin", "service": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies", - "shortName": "ApiHubDependencies" + "fullName": "google.cloud.apihub.v1.ApiHubPlugin", + "shortName": "ApiHubPlugin" }, - "shortName": "ListDependencies" + "shortName": "EnablePlugin" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListDependenciesRequest" + "type": "google.cloud.apihub_v1.types.EnablePluginRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -543,22 +538,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub_dependencies.pagers.ListDependenciesAsyncPager", - "shortName": "list_dependencies" + "resultType": "google.cloud.apihub_v1.types.Plugin", + "shortName": "enable_plugin" }, - "description": "Sample for ListDependencies", - "file": "apihub_v1_generated_api_hub_dependencies_list_dependencies_async.py", + "description": "Sample for EnablePlugin", + "file": "apihub_v1_generated_api_hub_plugin_enable_plugin_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_ListDependencies_async", + "regionTag": "apihub_v1_generated_ApiHubPlugin_EnablePlugin_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, 
"type": "SHORT" }, @@ -578,36 +573,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_list_dependencies_async.py" + "title": "apihub_v1_generated_api_hub_plugin_enable_plugin_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", - "shortName": "ApiHubDependenciesClient" + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", + "shortName": "ApiHubPluginClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.list_dependencies", + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.get_plugin", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.ListDependencies", + "fullName": "google.cloud.apihub.v1.ApiHubPlugin.GetPlugin", "service": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies", - "shortName": "ApiHubDependencies" + "fullName": "google.cloud.apihub.v1.ApiHubPlugin", + "shortName": "ApiHubPlugin" }, - "shortName": "ListDependencies" + "shortName": "GetPlugin" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListDependenciesRequest" + "type": "google.cloud.apihub_v1.types.GetPluginRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -623,22 +618,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub_dependencies.pagers.ListDependenciesPager", - "shortName": "list_dependencies" + "resultType": "google.cloud.apihub_v1.types.Plugin", + "shortName": "get_plugin" }, - "description": "Sample for ListDependencies", - "file": "apihub_v1_generated_api_hub_dependencies_list_dependencies_sync.py", + "description": "Sample for GetPlugin", + "file": "apihub_v1_generated_api_hub_plugin_get_plugin_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_ListDependencies_sync", + 
"regionTag": "apihub_v1_generated_ApiHubPlugin_GetPlugin_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -658,4532 +653,44 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_list_dependencies_sync.py" + "title": "apihub_v1_generated_api_hub_plugin_get_plugin_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient", - "shortName": "ApiHubDependenciesAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient.update_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_api", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.UpdateDependency", + "fullName": "google.cloud.apihub.v1.ApiHub.CreateApi", "service": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies", - "shortName": "ApiHubDependencies" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "UpdateDependency" + "shortName": "CreateApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateDependencyRequest" + "type": "google.cloud.apihub_v1.types.CreateApiRequest" }, { - "name": "dependency", - "type": "google.cloud.apihub_v1.types.Dependency" + "name": "parent", + "type": "str" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "api", + "type": "google.cloud.apihub_v1.types.Api" }, { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Dependency", - "shortName": "update_dependency" - }, 
- "description": "Sample for UpdateDependency", - "file": "apihub_v1_generated_api_hub_dependencies_update_dependency_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_UpdateDependency_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_dependencies_update_dependency_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", - "shortName": "ApiHubDependenciesClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.update_dependency", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.UpdateDependency", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies", - "shortName": "ApiHubDependencies" - }, - "shortName": "UpdateDependency" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateDependencyRequest" - }, - { - "name": "dependency", - "type": "google.cloud.apihub_v1.types.Dependency" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Dependency", - "shortName": "update_dependency" - }, - "description": "Sample for UpdateDependency", - "file": "apihub_v1_generated_api_hub_dependencies_update_dependency_sync.py", - "language": "PYTHON", - "origin": 
"API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_UpdateDependency_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_dependencies_update_dependency_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubPluginAsyncClient", - "shortName": "ApiHubPluginAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubPluginAsyncClient.disable_plugin", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin.DisablePlugin", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin", - "shortName": "ApiHubPlugin" - }, - "shortName": "DisablePlugin" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DisablePluginRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Plugin", - "shortName": "disable_plugin" - }, - "description": "Sample for DisablePlugin", - "file": "apihub_v1_generated_api_hub_plugin_disable_plugin_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubPlugin_DisablePlugin_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_plugin_disable_plugin_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", - "shortName": "ApiHubPluginClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.disable_plugin", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin.DisablePlugin", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin", - "shortName": "ApiHubPlugin" - }, - "shortName": "DisablePlugin" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DisablePluginRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Plugin", - "shortName": "disable_plugin" - }, - "description": "Sample for DisablePlugin", - "file": "apihub_v1_generated_api_hub_plugin_disable_plugin_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubPlugin_DisablePlugin_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_plugin_disable_plugin_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubPluginAsyncClient", - 
"shortName": "ApiHubPluginAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubPluginAsyncClient.enable_plugin", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin.EnablePlugin", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin", - "shortName": "ApiHubPlugin" - }, - "shortName": "EnablePlugin" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.EnablePluginRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Plugin", - "shortName": "enable_plugin" - }, - "description": "Sample for EnablePlugin", - "file": "apihub_v1_generated_api_hub_plugin_enable_plugin_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubPlugin_EnablePlugin_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_plugin_enable_plugin_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", - "shortName": "ApiHubPluginClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.enable_plugin", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin.EnablePlugin", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin", - "shortName": "ApiHubPlugin" - }, - "shortName": "EnablePlugin" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.apihub_v1.types.EnablePluginRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Plugin", - "shortName": "enable_plugin" - }, - "description": "Sample for EnablePlugin", - "file": "apihub_v1_generated_api_hub_plugin_enable_plugin_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubPlugin_EnablePlugin_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_plugin_enable_plugin_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubPluginAsyncClient", - "shortName": "ApiHubPluginAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubPluginAsyncClient.get_plugin", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin.GetPlugin", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin", - "shortName": "ApiHubPlugin" - }, - "shortName": "GetPlugin" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetPluginRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Plugin", - "shortName": "get_plugin" - }, - 
"description": "Sample for GetPlugin", - "file": "apihub_v1_generated_api_hub_plugin_get_plugin_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubPlugin_GetPlugin_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_plugin_get_plugin_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", - "shortName": "ApiHubPluginClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.get_plugin", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin.GetPlugin", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin", - "shortName": "ApiHubPlugin" - }, - "shortName": "GetPlugin" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetPluginRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Plugin", - "shortName": "get_plugin" - }, - "description": "Sample for GetPlugin", - "file": "apihub_v1_generated_api_hub_plugin_get_plugin_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubPlugin_GetPlugin_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - 
{ - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_plugin_get_plugin_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.create_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateApiRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "api", - "type": "google.cloud.apihub_v1.types.Api" - }, - { - "name": "api_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Api", - "shortName": "create_api" - }, - "description": "Sample for CreateApi", - "file": "apihub_v1_generated_api_hub_create_api_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateApi_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_api_async.py" - }, - { - "canonical": true, - "clientMethod": { - 
"client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.create_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateApiRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "api", - "type": "google.cloud.apihub_v1.types.Api" - }, - { - "name": "api_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Api", - "shortName": "create_api" - }, - "description": "Sample for CreateApi", - "file": "apihub_v1_generated_api_hub_create_api_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateApi_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_api_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.create_attribute", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateAttribute", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - 
}, - "shortName": "CreateAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateAttributeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "attribute", - "type": "google.cloud.apihub_v1.types.Attribute" - }, - { - "name": "attribute_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Attribute", - "shortName": "create_attribute" - }, - "description": "Sample for CreateAttribute", - "file": "apihub_v1_generated_api_hub_create_attribute_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateAttribute_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_attribute_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.create_attribute", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateAttribute", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateAttributeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "attribute", - "type": "google.cloud.apihub_v1.types.Attribute" - }, - { - 
"name": "attribute_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Attribute", - "shortName": "create_attribute" - }, - "description": "Sample for CreateAttribute", - "file": "apihub_v1_generated_api_hub_create_attribute_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateAttribute_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_attribute_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.create_deployment", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateDeployment", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateDeployment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateDeploymentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "deployment", - "type": "google.cloud.apihub_v1.types.Deployment" - }, - { - "name": "deployment_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.cloud.apihub_v1.types.Deployment", - "shortName": "create_deployment" - }, - "description": "Sample for CreateDeployment", - "file": "apihub_v1_generated_api_hub_create_deployment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateDeployment_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_deployment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.create_deployment", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateDeployment", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateDeployment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateDeploymentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "deployment", - "type": "google.cloud.apihub_v1.types.Deployment" - }, - { - "name": "deployment_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Deployment", - "shortName": "create_deployment" - }, - "description": "Sample for CreateDeployment", - "file": "apihub_v1_generated_api_hub_create_deployment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"apihub_v1_generated_ApiHub_CreateDeployment_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_deployment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.create_external_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateExternalApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateExternalApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateExternalApiRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "external_api", - "type": "google.cloud.apihub_v1.types.ExternalApi" - }, - { - "name": "external_api_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.ExternalApi", - "shortName": "create_external_api" - }, - "description": "Sample for CreateExternalApi", - "file": "apihub_v1_generated_api_hub_create_external_api_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateExternalApi_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_external_api_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.create_external_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateExternalApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateExternalApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateExternalApiRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "external_api", - "type": "google.cloud.apihub_v1.types.ExternalApi" - }, - { - "name": "external_api_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.ExternalApi", - "shortName": "create_external_api" - }, - "description": "Sample for CreateExternalApi", - "file": "apihub_v1_generated_api_hub_create_external_api_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateExternalApi_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"apihub_v1_generated_api_hub_create_external_api_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.create_spec", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateSpec", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateSpecRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "spec", - "type": "google.cloud.apihub_v1.types.Spec" - }, - { - "name": "spec_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Spec", - "shortName": "create_spec" - }, - "description": "Sample for CreateSpec", - "file": "apihub_v1_generated_api_hub_create_spec_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateSpec_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_spec_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.create_spec", - "method": { - "fullName": 
"google.cloud.apihub.v1.ApiHub.CreateSpec", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateSpecRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "spec", - "type": "google.cloud.apihub_v1.types.Spec" - }, - { - "name": "spec_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Spec", - "shortName": "create_spec" - }, - "description": "Sample for CreateSpec", - "file": "apihub_v1_generated_api_hub_create_spec_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateSpec_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_spec_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.create_version", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateVersion", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateVersion" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateVersionRequest" - }, - { - "name": "parent", - "type": 
"str" - }, - { - "name": "version", - "type": "google.cloud.apihub_v1.types.Version" - }, - { - "name": "version_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Version", - "shortName": "create_version" - }, - "description": "Sample for CreateVersion", - "file": "apihub_v1_generated_api_hub_create_version_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateVersion_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_version_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.create_version", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateVersion", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateVersion" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateVersionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "version", - "type": "google.cloud.apihub_v1.types.Version" - }, - { - "name": "version_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, 
str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Version", - "shortName": "create_version" - }, - "description": "Sample for CreateVersion", - "file": "apihub_v1_generated_api_hub_create_version_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateVersion_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_version_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.delete_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_api" - }, - "description": "Sample for DeleteApi", - "file": "apihub_v1_generated_api_hub_delete_api_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteApi_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - 
"type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_api_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_api" - }, - "description": "Sample for DeleteApi", - "file": "apihub_v1_generated_api_hub_delete_api_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteApi_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_api_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.delete_attribute", - "method": { - "fullName": 
"google.cloud.apihub.v1.ApiHub.DeleteAttribute", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_attribute" - }, - "description": "Sample for DeleteAttribute", - "file": "apihub_v1_generated_api_hub_delete_attribute_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteAttribute_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_attribute_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_attribute", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteAttribute", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, 
str]" - } - ], - "shortName": "delete_attribute" - }, - "description": "Sample for DeleteAttribute", - "file": "apihub_v1_generated_api_hub_delete_attribute_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteAttribute_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_attribute_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.delete_deployment", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteDeployment", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteDeployment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteDeploymentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_deployment" - }, - "description": "Sample for DeleteDeployment", - "file": "apihub_v1_generated_api_hub_delete_deployment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteDeployment_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_deployment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_deployment", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteDeployment", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteDeployment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteDeploymentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_deployment" - }, - "description": "Sample for DeleteDeployment", - "file": "apihub_v1_generated_api_hub_delete_deployment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteDeployment_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_deployment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": 
"google.cloud.apihub_v1.ApiHubAsyncClient.delete_external_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteExternalApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteExternalApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteExternalApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_external_api" - }, - "description": "Sample for DeleteExternalApi", - "file": "apihub_v1_generated_api_hub_delete_external_api_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteExternalApi_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_external_api_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_external_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteExternalApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteExternalApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteExternalApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_external_api" - }, - "description": "Sample for DeleteExternalApi", - "file": "apihub_v1_generated_api_hub_delete_external_api_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteExternalApi_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_external_api_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.delete_spec", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteSpec", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_spec" - }, - "description": "Sample for DeleteSpec", - "file": "apihub_v1_generated_api_hub_delete_spec_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteSpec_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - 
"end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_spec_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_spec", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteSpec", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_spec" - }, - "description": "Sample for DeleteSpec", - "file": "apihub_v1_generated_api_hub_delete_spec_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteSpec_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_spec_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": 
"google.cloud.apihub_v1.ApiHubAsyncClient.delete_version", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteVersion", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteVersion" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteVersionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_version" - }, - "description": "Sample for DeleteVersion", - "file": "apihub_v1_generated_api_hub_delete_version_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteVersion_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_version_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_version", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteVersion", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteVersion" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteVersionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - 
}, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_version" - }, - "description": "Sample for DeleteVersion", - "file": "apihub_v1_generated_api_hub_delete_version_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteVersion_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_version_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_api_operation", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetApiOperation", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetApiOperation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetApiOperationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.ApiOperation", - "shortName": "get_api_operation" - }, - "description": "Sample for GetApiOperation", - "file": "apihub_v1_generated_api_hub_get_api_operation_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetApiOperation_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - 
"end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_api_operation_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_api_operation", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetApiOperation", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetApiOperation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetApiOperationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.ApiOperation", - "shortName": "get_api_operation" - }, - "description": "Sample for GetApiOperation", - "file": "apihub_v1_generated_api_hub_get_api_operation_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetApiOperation_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_api_operation_sync.py" - }, - { - 
"canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Api", - "shortName": "get_api" - }, - "description": "Sample for GetApi", - "file": "apihub_v1_generated_api_hub_get_api_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetApi_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_api_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetApi" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.apihub_v1.types.GetApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Api", - "shortName": "get_api" - }, - "description": "Sample for GetApi", - "file": "apihub_v1_generated_api_hub_get_api_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetApi_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_api_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_attribute", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetAttribute", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Attribute", - "shortName": "get_attribute" - }, - "description": "Sample for GetAttribute", - "file": 
"apihub_v1_generated_api_hub_get_attribute_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetAttribute_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_attribute_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_attribute", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetAttribute", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Attribute", - "shortName": "get_attribute" - }, - "description": "Sample for GetAttribute", - "file": "apihub_v1_generated_api_hub_get_attribute_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetAttribute_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 
48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_attribute_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_definition", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetDefinition", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetDefinition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetDefinitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Definition", - "shortName": "get_definition" - }, - "description": "Sample for GetDefinition", - "file": "apihub_v1_generated_api_hub_get_definition_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetDefinition_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_definition_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_definition", 
- "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetDefinition", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetDefinition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetDefinitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Definition", - "shortName": "get_definition" - }, - "description": "Sample for GetDefinition", - "file": "apihub_v1_generated_api_hub_get_definition_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetDefinition_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_definition_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_deployment", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetDeployment", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetDeployment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetDeploymentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Deployment", - "shortName": "get_deployment" - }, - "description": "Sample for GetDeployment", - "file": "apihub_v1_generated_api_hub_get_deployment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetDeployment_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_deployment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_deployment", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetDeployment", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetDeployment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetDeploymentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Deployment", - "shortName": "get_deployment" - }, - "description": "Sample for GetDeployment", - "file": "apihub_v1_generated_api_hub_get_deployment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"apihub_v1_generated_ApiHub_GetDeployment_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_deployment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_external_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetExternalApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetExternalApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetExternalApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.ExternalApi", - "shortName": "get_external_api" - }, - "description": "Sample for GetExternalApi", - "file": "apihub_v1_generated_api_hub_get_external_api_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetExternalApi_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 
52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_external_api_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_external_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetExternalApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetExternalApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetExternalApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.ExternalApi", - "shortName": "get_external_api" - }, - "description": "Sample for GetExternalApi", - "file": "apihub_v1_generated_api_hub_get_external_api_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetExternalApi_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_external_api_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_spec_contents", - "method": { - "fullName": 
"google.cloud.apihub.v1.ApiHub.GetSpecContents", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetSpecContents" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetSpecContentsRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.SpecContents", - "shortName": "get_spec_contents" - }, - "description": "Sample for GetSpecContents", - "file": "apihub_v1_generated_api_hub_get_spec_contents_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetSpecContents_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_spec_contents_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_spec_contents", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetSpecContents", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetSpecContents" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetSpecContentsRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": 
"timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.SpecContents", - "shortName": "get_spec_contents" - }, - "description": "Sample for GetSpecContents", - "file": "apihub_v1_generated_api_hub_get_spec_contents_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetSpecContents_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_spec_contents_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_spec", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetSpec", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Spec", - "shortName": "get_spec" - }, - "description": "Sample for GetSpec", - "file": "apihub_v1_generated_api_hub_get_spec_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetSpec_async", - "segments": [ - { 
- "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_spec_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_spec", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetSpec", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Spec", - "shortName": "get_spec" - }, - "description": "Sample for GetSpec", - "file": "apihub_v1_generated_api_hub_get_spec_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetSpec_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_spec_sync.py" - }, - { - "canonical": true, - "clientMethod": { - 
"async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_version", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetVersion", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetVersion" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetVersionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Version", - "shortName": "get_version" - }, - "description": "Sample for GetVersion", - "file": "apihub_v1_generated_api_hub_get_version_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetVersion_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_version_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_version", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetVersion", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetVersion" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.apihub_v1.types.GetVersionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Version", - "shortName": "get_version" - }, - "description": "Sample for GetVersion", - "file": "apihub_v1_generated_api_hub_get_version_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetVersion_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_version_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_api_operations", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListApiOperations", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListApiOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListApiOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApiOperationsAsyncPager", - "shortName": 
"list_api_operations" - }, - "description": "Sample for ListApiOperations", - "file": "apihub_v1_generated_api_hub_list_api_operations_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListApiOperations_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_api_operations_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_api_operations", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListApiOperations", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListApiOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListApiOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApiOperationsPager", - "shortName": "list_api_operations" - }, - "description": "Sample for ListApiOperations", - "file": "apihub_v1_generated_api_hub_list_api_operations_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListApiOperations_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, 
- "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_api_operations_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_apis", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListApis", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListApis" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListApisRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApisAsyncPager", - "shortName": "list_apis" - }, - "description": "Sample for ListApis", - "file": "apihub_v1_generated_api_hub_list_apis_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListApis_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_apis_async.py" - }, - { - "canonical": true, - "clientMethod": { - 
"client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_apis", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListApis", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListApis" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListApisRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApisPager", - "shortName": "list_apis" - }, - "description": "Sample for ListApis", - "file": "apihub_v1_generated_api_hub_list_apis_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListApis_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_apis_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_attributes", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListAttributes", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListAttributes" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.apihub_v1.types.ListAttributesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListAttributesAsyncPager", - "shortName": "list_attributes" - }, - "description": "Sample for ListAttributes", - "file": "apihub_v1_generated_api_hub_list_attributes_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListAttributes_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_attributes_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_attributes", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListAttributes", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListAttributes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListAttributesRequest" - }, - { - "name": "parent", + "name": "api_id", "type": "str" }, { @@ -5199,22 +706,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListAttributesPager", - "shortName": "list_attributes" + "resultType": "google.cloud.apihub_v1.types.Api", + "shortName": "create_api" }, - 
"description": "Sample for ListAttributes", - "file": "apihub_v1_generated_api_hub_list_attributes_sync.py", + "description": "Sample for CreateApi", + "file": "apihub_v1_generated_api_hub_create_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListAttributes_sync", + "regionTag": "apihub_v1_generated_ApiHub_CreateApi_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -5224,103 +731,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_attributes_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_deployments", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListDeployments", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListDeployments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListDeploymentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListDeploymentsAsyncPager", - "shortName": "list_deployments" - }, - "description": "Sample for ListDeployments", - "file": "apihub_v1_generated_api_hub_list_deployments_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"apihub_v1_generated_ApiHub_ListDeployments_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, { "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_list_deployments_async.py" + "title": "apihub_v1_generated_api_hub_create_api_sync.py" }, { "canonical": true, @@ -5329,183 +755,30 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_deployments", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListDeployments", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListDeployments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListDeploymentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListDeploymentsPager", - "shortName": "list_deployments" - }, - "description": "Sample for ListDeployments", - "file": "apihub_v1_generated_api_hub_list_deployments_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListDeployments_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - 
"end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_deployments_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_external_apis", + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_attribute", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListExternalApis", + "fullName": "google.cloud.apihub.v1.ApiHub.CreateAttribute", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "ListExternalApis" + "shortName": "CreateAttribute" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListExternalApisRequest" + "type": "google.cloud.apihub_v1.types.CreateAttributeRequest" }, { "name": "parent", "type": "str" }, { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListExternalApisAsyncPager", - "shortName": "list_external_apis" - }, - "description": "Sample for ListExternalApis", - "file": "apihub_v1_generated_api_hub_list_external_apis_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListExternalApis_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"apihub_v1_generated_api_hub_list_external_apis_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_external_apis", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListExternalApis", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListExternalApis" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListExternalApisRequest" + "name": "attribute", + "type": "google.cloud.apihub_v1.types.Attribute" }, { - "name": "parent", + "name": "attribute_id", "type": "str" }, { @@ -5521,22 +794,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListExternalApisPager", - "shortName": "list_external_apis" + "resultType": "google.cloud.apihub_v1.types.Attribute", + "shortName": "create_attribute" }, - "description": "Sample for ListExternalApis", - "file": "apihub_v1_generated_api_hub_list_external_apis_sync.py", + "description": "Sample for CreateAttribute", + "file": "apihub_v1_generated_api_hub_create_attribute_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListExternalApis_sync", + "regionTag": "apihub_v1_generated_ApiHub_CreateAttribute_sync", "segments": [ { - "end": 52, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 57, "start": 27, "type": "SHORT" }, @@ -5546,49 +819,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 54, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_list_external_apis_sync.py" + "title": 
"apihub_v1_generated_api_hub_create_attribute_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_specs", + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_deployment", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListSpecs", + "fullName": "google.cloud.apihub.v1.ApiHub.CreateDeployment", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "ListSpecs" + "shortName": "CreateDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListSpecsRequest" + "type": "google.cloud.apihub_v1.types.CreateDeploymentRequest" }, { "name": "parent", "type": "str" }, + { + "name": "deployment", + "type": "google.cloud.apihub_v1.types.Deployment" + }, + { + "name": "deployment_id", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -5602,22 +882,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListSpecsAsyncPager", - "shortName": "list_specs" + "resultType": "google.cloud.apihub_v1.types.Deployment", + "shortName": "create_deployment" }, - "description": "Sample for ListSpecs", - "file": "apihub_v1_generated_api_hub_list_specs_async.py", + "description": "Sample for CreateDeployment", + "file": "apihub_v1_generated_api_hub_create_deployment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListSpecs_async", + "regionTag": "apihub_v1_generated_ApiHub_CreateDeployment_sync", "segments": [ { - "end": 52, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 59, "start": 27, "type": "SHORT" }, @@ -5627,22 +907,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + 
"end": 53, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 54, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_list_specs_async.py" + "title": "apihub_v1_generated_api_hub_create_deployment_sync.py" }, { "canonical": true, @@ -5651,24 +931,32 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_specs", + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_external_api", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListSpecs", + "fullName": "google.cloud.apihub.v1.ApiHub.CreateExternalApi", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "ListSpecs" + "shortName": "CreateExternalApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListSpecsRequest" + "type": "google.cloud.apihub_v1.types.CreateExternalApiRequest" }, { "name": "parent", "type": "str" }, + { + "name": "external_api", + "type": "google.cloud.apihub_v1.types.ExternalApi" + }, + { + "name": "external_api_id", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -5682,22 +970,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListSpecsPager", - "shortName": "list_specs" + "resultType": "google.cloud.apihub_v1.types.ExternalApi", + "shortName": "create_external_api" }, - "description": "Sample for ListSpecs", - "file": "apihub_v1_generated_api_hub_list_specs_sync.py", + "description": "Sample for CreateExternalApi", + "file": "apihub_v1_generated_api_hub_create_external_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListSpecs_sync", + "regionTag": "apihub_v1_generated_ApiHub_CreateExternalApi_sync", "segments": [ 
{ - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -5707,49 +995,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_list_specs_sync.py" + "title": "apihub_v1_generated_api_hub_create_external_api_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_versions", + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_spec", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListVersions", + "fullName": "google.cloud.apihub.v1.ApiHub.CreateSpec", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "ListVersions" + "shortName": "CreateSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListVersionsRequest" + "type": "google.cloud.apihub_v1.types.CreateSpecRequest" }, { "name": "parent", "type": "str" }, + { + "name": "spec", + "type": "google.cloud.apihub_v1.types.Spec" + }, + { + "name": "spec_id", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -5763,22 +1058,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListVersionsAsyncPager", - "shortName": "list_versions" + "resultType": "google.cloud.apihub_v1.types.Spec", + "shortName": "create_spec" }, - "description": "Sample for ListVersions", - "file": "apihub_v1_generated_api_hub_list_versions_async.py", + "description": "Sample 
for CreateSpec", + "file": "apihub_v1_generated_api_hub_create_spec_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListVersions_async", + "regionTag": "apihub_v1_generated_ApiHub_CreateSpec_sync", "segments": [ { - "end": 52, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 57, "start": 27, "type": "SHORT" }, @@ -5788,22 +1083,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 54, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_list_versions_async.py" + "title": "apihub_v1_generated_api_hub_create_spec_sync.py" }, { "canonical": true, @@ -5812,24 +1107,32 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_versions", + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_version", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListVersions", + "fullName": "google.cloud.apihub.v1.ApiHub.CreateVersion", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "ListVersions" + "shortName": "CreateVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListVersionsRequest" + "type": "google.cloud.apihub_v1.types.CreateVersionRequest" }, { "name": "parent", "type": "str" }, + { + "name": "version", + "type": "google.cloud.apihub_v1.types.Version" + }, + { + "name": "version_id", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -5843,22 +1146,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListVersionsPager", - "shortName": "list_versions" + "resultType": "google.cloud.apihub_v1.types.Version", + 
"shortName": "create_version" }, - "description": "Sample for ListVersions", - "file": "apihub_v1_generated_api_hub_list_versions_sync.py", + "description": "Sample for CreateVersion", + "file": "apihub_v1_generated_api_hub_create_version_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListVersions_sync", + "regionTag": "apihub_v1_generated_ApiHub_CreateVersion_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -5868,51 +1171,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_list_versions_sync.py" + "title": "apihub_v1_generated_api_hub_create_version_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.search_resources", + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_api", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.SearchResources", + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteApi", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "SearchResources" + "shortName": "DeleteApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.SearchResourcesRequest" - }, - { - "name": "location", - "type": "str" + "type": "google.cloud.apihub_v1.types.DeleteApiRequest" }, { - "name": "query", + "name": "name", "type": "str" }, { @@ -5928,22 +1226,21 @@ "type": "Sequence[Tuple[str, str]" } ], 
- "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.SearchResourcesAsyncPager", - "shortName": "search_resources" + "shortName": "delete_api" }, - "description": "Sample for SearchResources", - "file": "apihub_v1_generated_api_hub_search_resources_async.py", + "description": "Sample for DeleteApi", + "file": "apihub_v1_generated_api_hub_delete_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_SearchResources_async", + "regionTag": "apihub_v1_generated_ApiHub_DeleteApi_sync", "segments": [ { - "end": 53, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 49, "start": 27, "type": "SHORT" }, @@ -5953,22 +1250,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 50, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_search_resources_async.py" + "title": "apihub_v1_generated_api_hub_delete_api_sync.py" }, { "canonical": true, @@ -5977,26 +1272,22 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.search_resources", + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_attribute", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.SearchResources", + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteAttribute", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "SearchResources" + "shortName": "DeleteAttribute" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.SearchResourcesRequest" - }, - { - "name": "location", - "type": "str" + "type": "google.cloud.apihub_v1.types.DeleteAttributeRequest" }, { - "name": "query", + "name": "name", "type": "str" }, { @@ -6012,22 +1303,21 @@ "type": "Sequence[Tuple[str, str]" } ], - 
"resultType": "google.cloud.apihub_v1.services.api_hub.pagers.SearchResourcesPager", - "shortName": "search_resources" + "shortName": "delete_attribute" }, - "description": "Sample for SearchResources", - "file": "apihub_v1_generated_api_hub_search_resources_sync.py", + "description": "Sample for DeleteAttribute", + "file": "apihub_v1_generated_api_hub_delete_attribute_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_SearchResources_sync", + "regionTag": "apihub_v1_generated_ApiHub_DeleteAttribute_sync", "segments": [ { - "end": 53, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 49, "start": 27, "type": "SHORT" }, @@ -6037,52 +1327,45 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 50, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_search_resources_sync.py" + "title": "apihub_v1_generated_api_hub_delete_attribute_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.update_api", + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_deployment", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateApi", + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteDeployment", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateApi" + "shortName": "DeleteDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateApiRequest" - }, - { - "name": "api", - "type": "google.cloud.apihub_v1.types.Api" + "type": 
"google.cloud.apihub_v1.types.DeleteDeploymentRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6097,22 +1380,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Api", - "shortName": "update_api" + "shortName": "delete_deployment" }, - "description": "Sample for UpdateApi", - "file": "apihub_v1_generated_api_hub_update_api_async.py", + "description": "Sample for DeleteDeployment", + "file": "apihub_v1_generated_api_hub_delete_deployment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateApi_async", + "regionTag": "apihub_v1_generated_ApiHub_DeleteDeployment_sync", "segments": [ { - "end": 54, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 49, "start": 27, "type": "SHORT" }, @@ -6122,22 +1404,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_api_async.py" + "title": "apihub_v1_generated_api_hub_delete_deployment_sync.py" }, { "canonical": true, @@ -6146,27 +1426,23 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.update_api", + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_external_api", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateApi", + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteExternalApi", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateApi" + "shortName": "DeleteExternalApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateApiRequest" - }, - { - "name": "api", - "type": 
"google.cloud.apihub_v1.types.Api" + "type": "google.cloud.apihub_v1.types.DeleteExternalApiRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6181,22 +1457,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Api", - "shortName": "update_api" + "shortName": "delete_external_api" }, - "description": "Sample for UpdateApi", - "file": "apihub_v1_generated_api_hub_update_api_sync.py", + "description": "Sample for DeleteExternalApi", + "file": "apihub_v1_generated_api_hub_delete_external_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateApi_sync", + "regionTag": "apihub_v1_generated_ApiHub_DeleteExternalApi_sync", "segments": [ { - "end": 54, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 49, "start": 27, "type": "SHORT" }, @@ -6206,52 +1481,45 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_api_sync.py" + "title": "apihub_v1_generated_api_hub_delete_external_api_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.update_attribute", + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_spec", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateAttribute", + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteSpec", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": 
"UpdateAttribute" + "shortName": "DeleteSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateAttributeRequest" - }, - { - "name": "attribute", - "type": "google.cloud.apihub_v1.types.Attribute" + "type": "google.cloud.apihub_v1.types.DeleteSpecRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6266,22 +1534,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Attribute", - "shortName": "update_attribute" + "shortName": "delete_spec" }, - "description": "Sample for UpdateAttribute", - "file": "apihub_v1_generated_api_hub_update_attribute_async.py", + "description": "Sample for DeleteSpec", + "file": "apihub_v1_generated_api_hub_delete_spec_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateAttribute_async", + "regionTag": "apihub_v1_generated_ApiHub_DeleteSpec_sync", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -6291,22 +1558,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_attribute_async.py" + "title": "apihub_v1_generated_api_hub_delete_spec_sync.py" }, { "canonical": true, @@ -6315,27 +1580,23 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.update_attribute", + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_version", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateAttribute", + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteVersion", "service": { "fullName": 
"google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateAttribute" + "shortName": "DeleteVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateAttributeRequest" - }, - { - "name": "attribute", - "type": "google.cloud.apihub_v1.types.Attribute" + "type": "google.cloud.apihub_v1.types.DeleteVersionRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6350,22 +1611,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Attribute", - "shortName": "update_attribute" + "shortName": "delete_version" }, - "description": "Sample for UpdateAttribute", - "file": "apihub_v1_generated_api_hub_update_attribute_sync.py", + "description": "Sample for DeleteVersion", + "file": "apihub_v1_generated_api_hub_delete_version_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateAttribute_sync", + "regionTag": "apihub_v1_generated_ApiHub_DeleteVersion_sync", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -6375,52 +1635,45 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_attribute_sync.py" + "title": "apihub_v1_generated_api_hub_delete_version_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.update_deployment", + "fullName": 
"google.cloud.apihub_v1.ApiHubClient.get_api_operation", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateDeployment", + "fullName": "google.cloud.apihub.v1.ApiHub.GetApiOperation", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateDeployment" + "shortName": "GetApiOperation" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateDeploymentRequest" - }, - { - "name": "deployment", - "type": "google.cloud.apihub_v1.types.Deployment" + "type": "google.cloud.apihub_v1.types.GetApiOperationRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6435,22 +1688,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Deployment", - "shortName": "update_deployment" + "resultType": "google.cloud.apihub_v1.types.ApiOperation", + "shortName": "get_api_operation" }, - "description": "Sample for UpdateDeployment", - "file": "apihub_v1_generated_api_hub_update_deployment_async.py", + "description": "Sample for GetApiOperation", + "file": "apihub_v1_generated_api_hub_get_api_operation_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateDeployment_async", + "regionTag": "apihub_v1_generated_ApiHub_GetApiOperation_sync", "segments": [ { - "end": 58, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 58, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6460,22 +1713,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 52, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 55, - "start": 53, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 59, - "start": 56, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_deployment_async.py" + "title": "apihub_v1_generated_api_hub_get_api_operation_sync.py" }, { 
"canonical": true, @@ -6484,27 +1737,23 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.update_deployment", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_api", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateDeployment", + "fullName": "google.cloud.apihub.v1.ApiHub.GetApi", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateDeployment" + "shortName": "GetApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateDeploymentRequest" - }, - { - "name": "deployment", - "type": "google.cloud.apihub_v1.types.Deployment" + "type": "google.cloud.apihub_v1.types.GetApiRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6519,22 +1768,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Deployment", - "shortName": "update_deployment" + "resultType": "google.cloud.apihub_v1.types.Api", + "shortName": "get_api" }, - "description": "Sample for UpdateDeployment", - "file": "apihub_v1_generated_api_hub_update_deployment_sync.py", + "description": "Sample for GetApi", + "file": "apihub_v1_generated_api_hub_get_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateDeployment_sync", + "regionTag": "apihub_v1_generated_ApiHub_GetApi_sync", "segments": [ { - "end": 58, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 58, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6544,52 +1793,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 52, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 55, - "start": 53, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 59, - "start": 56, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": 
"apihub_v1_generated_api_hub_update_deployment_sync.py" + "title": "apihub_v1_generated_api_hub_get_api_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.update_external_api", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_attribute", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateExternalApi", + "fullName": "google.cloud.apihub.v1.ApiHub.GetAttribute", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateExternalApi" + "shortName": "GetAttribute" }, "parameters": [ { - "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateExternalApiRequest" - }, - { - "name": "external_api", - "type": "google.cloud.apihub_v1.types.ExternalApi" + "name": "request", + "type": "google.cloud.apihub_v1.types.GetAttributeRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6604,22 +1848,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.ExternalApi", - "shortName": "update_external_api" + "resultType": "google.cloud.apihub_v1.types.Attribute", + "shortName": "get_attribute" }, - "description": "Sample for UpdateExternalApi", - "file": "apihub_v1_generated_api_hub_update_external_api_async.py", + "description": "Sample for GetAttribute", + "file": "apihub_v1_generated_api_hub_get_attribute_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateExternalApi_async", + "regionTag": "apihub_v1_generated_ApiHub_GetAttribute_sync", "segments": [ { - "end": 54, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": 
"SHORT" }, @@ -6629,22 +1873,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_external_api_async.py" + "title": "apihub_v1_generated_api_hub_get_attribute_sync.py" }, { "canonical": true, @@ -6653,27 +1897,23 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.update_external_api", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_definition", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateExternalApi", + "fullName": "google.cloud.apihub.v1.ApiHub.GetDefinition", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateExternalApi" + "shortName": "GetDefinition" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateExternalApiRequest" - }, - { - "name": "external_api", - "type": "google.cloud.apihub_v1.types.ExternalApi" + "type": "google.cloud.apihub_v1.types.GetDefinitionRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6688,22 +1928,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.ExternalApi", - "shortName": "update_external_api" + "resultType": "google.cloud.apihub_v1.types.Definition", + "shortName": "get_definition" }, - "description": "Sample for UpdateExternalApi", - "file": "apihub_v1_generated_api_hub_update_external_api_sync.py", + "description": "Sample for GetDefinition", + "file": "apihub_v1_generated_api_hub_get_definition_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"apihub_v1_generated_ApiHub_UpdateExternalApi_sync", + "regionTag": "apihub_v1_generated_ApiHub_GetDefinition_sync", "segments": [ { - "end": 54, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6713,52 +1953,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_external_api_sync.py" + "title": "apihub_v1_generated_api_hub_get_definition_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.update_spec", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_deployment", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateSpec", + "fullName": "google.cloud.apihub.v1.ApiHub.GetDeployment", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateSpec" + "shortName": "GetDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateSpecRequest" - }, - { - "name": "spec", - "type": "google.cloud.apihub_v1.types.Spec" + "type": "google.cloud.apihub_v1.types.GetDeploymentRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6773,22 +2008,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Spec", - "shortName": "update_spec" + "resultType": "google.cloud.apihub_v1.types.Deployment", + "shortName": "get_deployment" }, - "description": "Sample 
for UpdateSpec", - "file": "apihub_v1_generated_api_hub_update_spec_async.py", + "description": "Sample for GetDeployment", + "file": "apihub_v1_generated_api_hub_get_deployment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateSpec_async", + "regionTag": "apihub_v1_generated_ApiHub_GetDeployment_sync", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6798,22 +2033,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_spec_async.py" + "title": "apihub_v1_generated_api_hub_get_deployment_sync.py" }, { "canonical": true, @@ -6822,27 +2057,23 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.update_spec", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_external_api", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateSpec", + "fullName": "google.cloud.apihub.v1.ApiHub.GetExternalApi", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateSpec" + "shortName": "GetExternalApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateSpecRequest" - }, - { - "name": "spec", - "type": "google.cloud.apihub_v1.types.Spec" + "type": "google.cloud.apihub_v1.types.GetExternalApiRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6857,22 +2088,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Spec", - "shortName": "update_spec" 
+ "resultType": "google.cloud.apihub_v1.types.ExternalApi", + "shortName": "get_external_api" }, - "description": "Sample for UpdateSpec", - "file": "apihub_v1_generated_api_hub_update_spec_sync.py", + "description": "Sample for GetExternalApi", + "file": "apihub_v1_generated_api_hub_get_external_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateSpec_sync", + "regionTag": "apihub_v1_generated_ApiHub_GetExternalApi_sync", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6882,52 +2113,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_spec_sync.py" + "title": "apihub_v1_generated_api_hub_get_external_api_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.update_version", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_spec_contents", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateVersion", + "fullName": "google.cloud.apihub.v1.ApiHub.GetSpecContents", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateVersion" + "shortName": "GetSpecContents" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateVersionRequest" - }, - { - "name": "version", - "type": "google.cloud.apihub_v1.types.Version" + "type": "google.cloud.apihub_v1.types.GetSpecContentsRequest" }, { - 
"name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6942,22 +2168,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Version", - "shortName": "update_version" + "resultType": "google.cloud.apihub_v1.types.SpecContents", + "shortName": "get_spec_contents" }, - "description": "Sample for UpdateVersion", - "file": "apihub_v1_generated_api_hub_update_version_async.py", + "description": "Sample for GetSpecContents", + "file": "apihub_v1_generated_api_hub_get_spec_contents_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateVersion_async", + "regionTag": "apihub_v1_generated_ApiHub_GetSpecContents_sync", "segments": [ { - "end": 54, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6967,22 +2193,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_version_async.py" + "title": "apihub_v1_generated_api_hub_get_spec_contents_sync.py" }, { "canonical": true, @@ -6991,27 +2217,23 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.update_version", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_spec", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateVersion", + "fullName": "google.cloud.apihub.v1.ApiHub.GetSpec", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateVersion" + "shortName": "GetSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateVersionRequest" - 
}, - { - "name": "version", - "type": "google.cloud.apihub_v1.types.Version" + "type": "google.cloud.apihub_v1.types.GetSpecRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -7026,22 +2248,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Version", - "shortName": "update_version" + "resultType": "google.cloud.apihub_v1.types.Spec", + "shortName": "get_spec" }, - "description": "Sample for UpdateVersion", - "file": "apihub_v1_generated_api_hub_update_version_sync.py", + "description": "Sample for GetSpec", + "file": "apihub_v1_generated_api_hub_get_spec_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateVersion_sync", + "regionTag": "apihub_v1_generated_ApiHub_GetSpec_sync", "segments": [ { - "end": 54, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": "SHORT" }, @@ -7051,55 +2273,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_version_sync.py" + "title": "apihub_v1_generated_api_hub_get_spec_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceAsyncClient", - "shortName": "HostProjectRegistrationServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceAsyncClient.create_host_project_registration", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_version", "method": { - "fullName": 
"google.cloud.apihub.v1.HostProjectRegistrationService.CreateHostProjectRegistration", + "fullName": "google.cloud.apihub.v1.ApiHub.GetVersion", "service": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", - "shortName": "HostProjectRegistrationService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "CreateHostProjectRegistration" + "shortName": "GetVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateHostProjectRegistrationRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "host_project_registration", - "type": "google.cloud.apihub_v1.types.HostProjectRegistration" + "type": "google.cloud.apihub_v1.types.GetVersionRequest" }, { - "name": "host_project_registration_id", + "name": "name", "type": "str" }, { @@ -7115,22 +2328,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", - "shortName": "create_host_project_registration" + "resultType": "google.cloud.apihub_v1.types.Version", + "shortName": "get_version" }, - "description": "Sample for CreateHostProjectRegistration", - "file": "apihub_v1_generated_host_project_registration_service_create_host_project_registration_async.py", + "description": "Sample for GetVersion", + "file": "apihub_v1_generated_api_hub_get_version_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_HostProjectRegistrationService_CreateHostProjectRegistration_async", + "regionTag": "apihub_v1_generated_ApiHub_GetVersion_sync", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -7140,56 +2353,48 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + 
"end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_host_project_registration_service_create_host_project_registration_async.py" + "title": "apihub_v1_generated_api_hub_get_version_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", - "shortName": "HostProjectRegistrationServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient.create_host_project_registration", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_api_operations", "method": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.CreateHostProjectRegistration", + "fullName": "google.cloud.apihub.v1.ApiHub.ListApiOperations", "service": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", - "shortName": "HostProjectRegistrationService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "CreateHostProjectRegistration" + "shortName": "ListApiOperations" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateHostProjectRegistrationRequest" + "type": "google.cloud.apihub_v1.types.ListApiOperationsRequest" }, { "name": "parent", "type": "str" }, - { - "name": "host_project_registration", - "type": "google.cloud.apihub_v1.types.HostProjectRegistration" - }, - { - "name": "host_project_registration_id", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -7203,22 +2408,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", - "shortName": "create_host_project_registration" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApiOperationsPager", + "shortName": "list_api_operations" }, - "description": "Sample for CreateHostProjectRegistration", - "file": 
"apihub_v1_generated_host_project_registration_service_create_host_project_registration_sync.py", + "description": "Sample for ListApiOperations", + "file": "apihub_v1_generated_api_hub_list_api_operations_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_HostProjectRegistrationService_CreateHostProjectRegistration_sync", + "regionTag": "apihub_v1_generated_ApiHub_ListApiOperations_sync", "segments": [ { - "end": 56, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 52, "start": 27, "type": "SHORT" }, @@ -7228,47 +2433,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_host_project_registration_service_create_host_project_registration_sync.py" + "title": "apihub_v1_generated_api_hub_list_api_operations_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceAsyncClient", - "shortName": "HostProjectRegistrationServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceAsyncClient.get_host_project_registration", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_apis", "method": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.GetHostProjectRegistration", + "fullName": "google.cloud.apihub.v1.ApiHub.ListApis", "service": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", - "shortName": "HostProjectRegistrationService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "GetHostProjectRegistration" + "shortName": "ListApis" }, "parameters": [ { 
"name": "request", - "type": "google.cloud.apihub_v1.types.GetHostProjectRegistrationRequest" + "type": "google.cloud.apihub_v1.types.ListApisRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -7284,22 +2488,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", - "shortName": "get_host_project_registration" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApisPager", + "shortName": "list_apis" }, - "description": "Sample for GetHostProjectRegistration", - "file": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_async.py", + "description": "Sample for ListApis", + "file": "apihub_v1_generated_api_hub_list_apis_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_HostProjectRegistrationService_GetHostProjectRegistration_async", + "regionTag": "apihub_v1_generated_ApiHub_ListApis_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -7319,36 +2523,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_async.py" + "title": "apihub_v1_generated_api_hub_list_apis_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", - "shortName": "HostProjectRegistrationServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient.get_host_project_registration", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_attributes", "method": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.GetHostProjectRegistration", + "fullName": 
"google.cloud.apihub.v1.ApiHub.ListAttributes", "service": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", - "shortName": "HostProjectRegistrationService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "GetHostProjectRegistration" + "shortName": "ListAttributes" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetHostProjectRegistrationRequest" + "type": "google.cloud.apihub_v1.types.ListAttributesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -7364,22 +2568,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", - "shortName": "get_host_project_registration" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListAttributesPager", + "shortName": "list_attributes" }, - "description": "Sample for GetHostProjectRegistration", - "file": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_sync.py", + "description": "Sample for ListAttributes", + "file": "apihub_v1_generated_api_hub_list_attributes_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_HostProjectRegistrationService_GetHostProjectRegistration_sync", + "regionTag": "apihub_v1_generated_ApiHub_ListAttributes_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -7399,34 +2603,33 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_sync.py" + "title": "apihub_v1_generated_api_hub_list_attributes_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceAsyncClient", - "shortName": 
"HostProjectRegistrationServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceAsyncClient.list_host_project_registrations", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_deployments", "method": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.ListHostProjectRegistrations", + "fullName": "google.cloud.apihub.v1.ApiHub.ListDeployments", "service": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", - "shortName": "HostProjectRegistrationService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "ListHostProjectRegistrations" + "shortName": "ListDeployments" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListHostProjectRegistrationsRequest" + "type": "google.cloud.apihub_v1.types.ListDeploymentsRequest" }, { "name": "parent", @@ -7445,14 +2648,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.host_project_registration_service.pagers.ListHostProjectRegistrationsAsyncPager", - "shortName": "list_host_project_registrations" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListDeploymentsPager", + "shortName": "list_deployments" }, - "description": "Sample for ListHostProjectRegistrations", - "file": "apihub_v1_generated_host_project_registration_service_list_host_project_registrations_async.py", + "description": "Sample for ListDeployments", + "file": "apihub_v1_generated_api_hub_list_deployments_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_HostProjectRegistrationService_ListHostProjectRegistrations_async", + "regionTag": "apihub_v1_generated_ApiHub_ListDeployments_sync", "segments": [ { "end": 52, @@ -7485,28 +2688,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"apihub_v1_generated_host_project_registration_service_list_host_project_registrations_async.py" + "title": "apihub_v1_generated_api_hub_list_deployments_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", - "shortName": "HostProjectRegistrationServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient.list_host_project_registrations", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_external_apis", "method": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.ListHostProjectRegistrations", + "fullName": "google.cloud.apihub.v1.ApiHub.ListExternalApis", "service": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", - "shortName": "HostProjectRegistrationService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "ListHostProjectRegistrations" + "shortName": "ListExternalApis" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListHostProjectRegistrationsRequest" + "type": "google.cloud.apihub_v1.types.ListExternalApisRequest" }, { "name": "parent", @@ -7525,14 +2728,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.host_project_registration_service.pagers.ListHostProjectRegistrationsPager", - "shortName": "list_host_project_registrations" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListExternalApisPager", + "shortName": "list_external_apis" }, - "description": "Sample for ListHostProjectRegistrations", - "file": "apihub_v1_generated_host_project_registration_service_list_host_project_registrations_sync.py", + "description": "Sample for ListExternalApis", + "file": "apihub_v1_generated_api_hub_list_external_apis_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"apihub_v1_generated_HostProjectRegistrationService_ListHostProjectRegistrations_sync", + "regionTag": "apihub_v1_generated_ApiHub_ListExternalApis_sync", "segments": [ { "end": 52, @@ -7565,32 +2768,31 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_host_project_registration_service_list_host_project_registrations_sync.py" + "title": "apihub_v1_generated_api_hub_list_external_apis_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient", - "shortName": "LintingServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient.get_style_guide_contents", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_specs", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuideContents", + "fullName": "google.cloud.apihub.v1.ApiHub.ListSpecs", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "GetStyleGuideContents" + "shortName": "ListSpecs" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetStyleGuideContentsRequest" + "type": "google.cloud.apihub_v1.types.ListSpecsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -7606,22 +2808,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.StyleGuideContents", - "shortName": "get_style_guide_contents" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListSpecsPager", + "shortName": "list_specs" }, - "description": "Sample for GetStyleGuideContents", - "file": "apihub_v1_generated_linting_service_get_style_guide_contents_async.py", + "description": "Sample for ListSpecs", + "file": "apihub_v1_generated_api_hub_list_specs_sync.py", "language": "PYTHON", 
"origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_GetStyleGuideContents_async", + "regionTag": "apihub_v1_generated_ApiHub_ListSpecs_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -7641,36 +2843,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_get_style_guide_contents_async.py" + "title": "apihub_v1_generated_api_hub_list_specs_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceClient", - "shortName": "LintingServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceClient.get_style_guide_contents", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_versions", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuideContents", + "fullName": "google.cloud.apihub.v1.ApiHub.ListVersions", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "GetStyleGuideContents" + "shortName": "ListVersions" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetStyleGuideContentsRequest" + "type": "google.cloud.apihub_v1.types.ListVersionsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -7686,22 +2888,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.StyleGuideContents", - "shortName": "get_style_guide_contents" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListVersionsPager", + "shortName": "list_versions" }, - "description": "Sample for GetStyleGuideContents", - "file": 
"apihub_v1_generated_linting_service_get_style_guide_contents_sync.py", + "description": "Sample for ListVersions", + "file": "apihub_v1_generated_api_hub_list_versions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_GetStyleGuideContents_sync", + "regionTag": "apihub_v1_generated_ApiHub_ListVersions_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -7721,37 +2923,40 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_get_style_guide_contents_sync.py" + "title": "apihub_v1_generated_api_hub_list_versions_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient", - "shortName": "LintingServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient.get_style_guide", + "fullName": "google.cloud.apihub_v1.ApiHubClient.search_resources", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuide", + "fullName": "google.cloud.apihub.v1.ApiHub.SearchResources", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "GetStyleGuide" + "shortName": "SearchResources" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetStyleGuideRequest" + "type": "google.cloud.apihub_v1.types.SearchResourcesRequest" }, { - "name": "name", + "name": "location", + "type": "str" + }, + { + "name": "query", "type": "str" }, { @@ -7767,22 +2972,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.StyleGuide", - "shortName": 
"get_style_guide" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.SearchResourcesPager", + "shortName": "search_resources" }, - "description": "Sample for GetStyleGuide", - "file": "apihub_v1_generated_linting_service_get_style_guide_async.py", + "description": "Sample for SearchResources", + "file": "apihub_v1_generated_api_hub_search_resources_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_GetStyleGuide_async", + "regionTag": "apihub_v1_generated_ApiHub_SearchResources_sync", "segments": [ { - "end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -7792,47 +2997,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 54, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_get_style_guide_async.py" + "title": "apihub_v1_generated_api_hub_search_resources_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceClient", - "shortName": "LintingServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceClient.get_style_guide", + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_api", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuide", + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateApi", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "GetStyleGuide" + "shortName": "UpdateApi" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.apihub_v1.types.GetStyleGuideRequest" + "type": "google.cloud.apihub_v1.types.UpdateApiRequest" }, { - "name": "name", - "type": "str" + "name": "api", + "type": "google.cloud.apihub_v1.types.Api" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -7847,22 +3056,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.StyleGuide", - "shortName": "get_style_guide" + "resultType": "google.cloud.apihub_v1.types.Api", + "shortName": "update_api" }, - "description": "Sample for GetStyleGuide", - "file": "apihub_v1_generated_linting_service_get_style_guide_sync.py", + "description": "Sample for UpdateApi", + "file": "apihub_v1_generated_api_hub_update_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_GetStyleGuide_sync", + "regionTag": "apihub_v1_generated_ApiHub_UpdateApi_sync", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -7872,44 +3081,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_get_style_guide_sync.py" + "title": "apihub_v1_generated_api_hub_update_api_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient", - "shortName": "LintingServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient.lint_spec", + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_attribute", "method": { - "fullName": 
"google.cloud.apihub.v1.LintingService.LintSpec", + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateAttribute", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "LintSpec" + "shortName": "UpdateAttribute" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.LintSpecRequest" + "type": "google.cloud.apihub_v1.types.UpdateAttributeRequest" + }, + { + "name": "attribute", + "type": "google.cloud.apihub_v1.types.Attribute" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -7924,21 +3140,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "lint_spec" + "resultType": "google.cloud.apihub_v1.types.Attribute", + "shortName": "update_attribute" }, - "description": "Sample for LintSpec", - "file": "apihub_v1_generated_linting_service_lint_spec_async.py", + "description": "Sample for UpdateAttribute", + "file": "apihub_v1_generated_api_hub_update_attribute_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_LintSpec_async", + "regionTag": "apihub_v1_generated_ApiHub_UpdateAttribute_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, @@ -7948,41 +3165,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_lint_spec_async.py" + "title": "apihub_v1_generated_api_hub_update_attribute_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceClient", - "shortName": "LintingServiceClient" + 
"fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceClient.lint_spec", + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_deployment", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.LintSpec", + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateDeployment", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "LintSpec" + "shortName": "UpdateDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.LintSpecRequest" + "type": "google.cloud.apihub_v1.types.UpdateDeploymentRequest" + }, + { + "name": "deployment", + "type": "google.cloud.apihub_v1.types.Deployment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -7997,21 +3224,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "lint_spec" + "resultType": "google.cloud.apihub_v1.types.Deployment", + "shortName": "update_deployment" }, - "description": "Sample for LintSpec", - "file": "apihub_v1_generated_linting_service_lint_spec_sync.py", + "description": "Sample for UpdateDeployment", + "file": "apihub_v1_generated_api_hub_update_deployment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_LintSpec_sync", + "regionTag": "apihub_v1_generated_ApiHub_UpdateDeployment_sync", "segments": [ { - "end": 49, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 58, "start": 27, "type": "SHORT" }, @@ -8021,46 +3249,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 55, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": 
"apihub_v1_generated_linting_service_lint_spec_sync.py" + "title": "apihub_v1_generated_api_hub_update_deployment_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient", - "shortName": "LintingServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient.update_style_guide", + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_external_api", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.UpdateStyleGuide", + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateExternalApi", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "UpdateStyleGuide" + "shortName": "UpdateExternalApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateStyleGuideRequest" + "type": "google.cloud.apihub_v1.types.UpdateExternalApiRequest" }, { - "name": "style_guide", - "type": "google.cloud.apihub_v1.types.StyleGuide" + "name": "external_api", + "type": "google.cloud.apihub_v1.types.ExternalApi" }, { "name": "update_mask", @@ -8079,22 +3308,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.StyleGuide", - "shortName": "update_style_guide" + "resultType": "google.cloud.apihub_v1.types.ExternalApi", + "shortName": "update_external_api" }, - "description": "Sample for UpdateStyleGuide", - "file": "apihub_v1_generated_linting_service_update_style_guide_async.py", + "description": "Sample for UpdateExternalApi", + "file": "apihub_v1_generated_api_hub_update_external_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_UpdateStyleGuide_async", + "regionTag": "apihub_v1_generated_ApiHub_UpdateExternalApi_sync", 
"segments": [ { - "end": 56, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 54, "start": 27, "type": "SHORT" }, @@ -8104,47 +3333,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_update_style_guide_async.py" + "title": "apihub_v1_generated_api_hub_update_external_api_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceClient", - "shortName": "LintingServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceClient.update_style_guide", + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_spec", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.UpdateStyleGuide", + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateSpec", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "UpdateStyleGuide" + "shortName": "UpdateSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateStyleGuideRequest" + "type": "google.cloud.apihub_v1.types.UpdateSpecRequest" }, { - "name": "style_guide", - "type": "google.cloud.apihub_v1.types.StyleGuide" + "name": "spec", + "type": "google.cloud.apihub_v1.types.Spec" }, { "name": "update_mask", @@ -8163,14 +3392,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.StyleGuide", - "shortName": "update_style_guide" + "resultType": "google.cloud.apihub_v1.types.Spec", + "shortName": "update_spec" }, - "description": "Sample for UpdateStyleGuide", - "file": 
"apihub_v1_generated_linting_service_update_style_guide_sync.py", + "description": "Sample for UpdateSpec", + "file": "apihub_v1_generated_api_hub_update_spec_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_UpdateStyleGuide_sync", + "regionTag": "apihub_v1_generated_ApiHub_UpdateSpec_sync", "segments": [ { "end": 56, @@ -8203,41 +3432,36 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_update_style_guide_sync.py" + "title": "apihub_v1_generated_api_hub_update_spec_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ProvisioningAsyncClient", - "shortName": "ProvisioningAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ProvisioningAsyncClient.create_api_hub_instance", + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_version", "method": { - "fullName": "google.cloud.apihub.v1.Provisioning.CreateApiHubInstance", + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateVersion", "service": { - "fullName": "google.cloud.apihub.v1.Provisioning", - "shortName": "Provisioning" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "CreateApiHubInstance" + "shortName": "UpdateVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateApiHubInstanceRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.apihub_v1.types.UpdateVersionRequest" }, { - "name": "api_hub_instance", - "type": "google.cloud.apihub_v1.types.ApiHubInstance" + "name": "version", + "type": "google.cloud.apihub_v1.types.Version" }, { - "name": "api_hub_instance_id", - "type": "str" + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -8252,22 +3476,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.api_core.operation_async.AsyncOperation", - "shortName": "create_api_hub_instance" + "resultType": "google.cloud.apihub_v1.types.Version", + "shortName": "update_version" }, - "description": "Sample for CreateApiHubInstance", - "file": "apihub_v1_generated_provisioning_create_api_hub_instance_async.py", + "description": "Sample for UpdateVersion", + "file": "apihub_v1_generated_api_hub_update_version_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_Provisioning_CreateApiHubInstance_async", + "regionTag": "apihub_v1_generated_ApiHub_UpdateVersion_sync", "segments": [ { - "end": 59, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 59, + "end": 54, "start": 27, "type": "SHORT" }, @@ -8277,54 +3501,54 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 50, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_provisioning_create_api_hub_instance_async.py" + "title": "apihub_v1_generated_api_hub_update_version_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.ProvisioningClient", - "shortName": "ProvisioningClient" + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", + "shortName": "HostProjectRegistrationServiceClient" }, - "fullName": "google.cloud.apihub_v1.ProvisioningClient.create_api_hub_instance", + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient.create_host_project_registration", "method": { - "fullName": "google.cloud.apihub.v1.Provisioning.CreateApiHubInstance", + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.CreateHostProjectRegistration", "service": { - "fullName": "google.cloud.apihub.v1.Provisioning", - "shortName": "Provisioning" + "fullName": 
"google.cloud.apihub.v1.HostProjectRegistrationService", + "shortName": "HostProjectRegistrationService" }, - "shortName": "CreateApiHubInstance" + "shortName": "CreateHostProjectRegistration" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateApiHubInstanceRequest" + "type": "google.cloud.apihub_v1.types.CreateHostProjectRegistrationRequest" }, { "name": "parent", "type": "str" }, { - "name": "api_hub_instance", - "type": "google.cloud.apihub_v1.types.ApiHubInstance" + "name": "host_project_registration", + "type": "google.cloud.apihub_v1.types.HostProjectRegistration" }, { - "name": "api_hub_instance_id", + "name": "host_project_registration_id", "type": "str" }, { @@ -8340,22 +3564,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_api_hub_instance" + "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", + "shortName": "create_host_project_registration" }, - "description": "Sample for CreateApiHubInstance", - "file": "apihub_v1_generated_provisioning_create_api_hub_instance_sync.py", + "description": "Sample for CreateHostProjectRegistration", + "file": "apihub_v1_generated_host_project_registration_service_create_host_project_registration_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_Provisioning_CreateApiHubInstance_sync", + "regionTag": "apihub_v1_generated_HostProjectRegistrationService_CreateHostProjectRegistration_sync", "segments": [ { - "end": 59, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 59, + "end": 56, "start": 27, "type": "SHORT" }, @@ -8365,44 +3589,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 50, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": 
"apihub_v1_generated_provisioning_create_api_hub_instance_sync.py" + "title": "apihub_v1_generated_host_project_registration_service_create_host_project_registration_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ProvisioningAsyncClient", - "shortName": "ProvisioningAsyncClient" + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", + "shortName": "HostProjectRegistrationServiceClient" }, - "fullName": "google.cloud.apihub_v1.ProvisioningAsyncClient.get_api_hub_instance", + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient.get_host_project_registration", "method": { - "fullName": "google.cloud.apihub.v1.Provisioning.GetApiHubInstance", + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.GetHostProjectRegistration", "service": { - "fullName": "google.cloud.apihub.v1.Provisioning", - "shortName": "Provisioning" + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", + "shortName": "HostProjectRegistrationService" }, - "shortName": "GetApiHubInstance" + "shortName": "GetHostProjectRegistration" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetApiHubInstanceRequest" + "type": "google.cloud.apihub_v1.types.GetHostProjectRegistrationRequest" }, { "name": "name", @@ -8421,14 +3644,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.ApiHubInstance", - "shortName": "get_api_hub_instance" + "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", + "shortName": "get_host_project_registration" }, - "description": "Sample for GetApiHubInstance", - "file": "apihub_v1_generated_provisioning_get_api_hub_instance_async.py", + "description": "Sample for GetHostProjectRegistration", + "file": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"apihub_v1_generated_Provisioning_GetApiHubInstance_async", + "regionTag": "apihub_v1_generated_HostProjectRegistrationService_GetHostProjectRegistration_sync", "segments": [ { "end": 51, @@ -8461,31 +3684,31 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_provisioning_get_api_hub_instance_async.py" + "title": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.ProvisioningClient", - "shortName": "ProvisioningClient" + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", + "shortName": "HostProjectRegistrationServiceClient" }, - "fullName": "google.cloud.apihub_v1.ProvisioningClient.get_api_hub_instance", + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient.list_host_project_registrations", "method": { - "fullName": "google.cloud.apihub.v1.Provisioning.GetApiHubInstance", + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.ListHostProjectRegistrations", "service": { - "fullName": "google.cloud.apihub.v1.Provisioning", - "shortName": "Provisioning" + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", + "shortName": "HostProjectRegistrationService" }, - "shortName": "GetApiHubInstance" + "shortName": "ListHostProjectRegistrations" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetApiHubInstanceRequest" + "type": "google.cloud.apihub_v1.types.ListHostProjectRegistrationsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -8501,22 +3724,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.ApiHubInstance", - "shortName": "get_api_hub_instance" + "resultType": "google.cloud.apihub_v1.services.host_project_registration_service.pagers.ListHostProjectRegistrationsPager", + "shortName": "list_host_project_registrations" }, - "description": "Sample for 
GetApiHubInstance", - "file": "apihub_v1_generated_provisioning_get_api_hub_instance_sync.py", + "description": "Sample for ListHostProjectRegistrations", + "file": "apihub_v1_generated_host_project_registration_service_list_host_project_registrations_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_Provisioning_GetApiHubInstance_sync", + "regionTag": "apihub_v1_generated_HostProjectRegistrationService_ListHostProjectRegistrations_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -8536,37 +3759,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_provisioning_get_api_hub_instance_sync.py" + "title": "apihub_v1_generated_host_project_registration_service_list_host_project_registrations_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ProvisioningAsyncClient", - "shortName": "ProvisioningAsyncClient" + "fullName": "google.cloud.apihub_v1.LintingServiceClient", + "shortName": "LintingServiceClient" }, - "fullName": "google.cloud.apihub_v1.ProvisioningAsyncClient.lookup_api_hub_instance", + "fullName": "google.cloud.apihub_v1.LintingServiceClient.get_style_guide_contents", "method": { - "fullName": "google.cloud.apihub.v1.Provisioning.LookupApiHubInstance", + "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuideContents", "service": { - "fullName": "google.cloud.apihub.v1.Provisioning", - "shortName": "Provisioning" + "fullName": "google.cloud.apihub.v1.LintingService", + "shortName": "LintingService" }, - "shortName": "LookupApiHubInstance" + "shortName": "GetStyleGuideContents" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.LookupApiHubInstanceRequest" + "type": "google.cloud.apihub_v1.types.GetStyleGuideContentsRequest" }, { - 
"name": "parent", + "name": "name", "type": "str" }, { @@ -8582,14 +3804,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.LookupApiHubInstanceResponse", - "shortName": "lookup_api_hub_instance" + "resultType": "google.cloud.apihub_v1.types.StyleGuideContents", + "shortName": "get_style_guide_contents" }, - "description": "Sample for LookupApiHubInstance", - "file": "apihub_v1_generated_provisioning_lookup_api_hub_instance_async.py", + "description": "Sample for GetStyleGuideContents", + "file": "apihub_v1_generated_linting_service_get_style_guide_contents_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_Provisioning_LookupApiHubInstance_async", + "regionTag": "apihub_v1_generated_LintingService_GetStyleGuideContents_sync", "segments": [ { "end": 51, @@ -8622,31 +3844,31 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_provisioning_lookup_api_hub_instance_async.py" + "title": "apihub_v1_generated_linting_service_get_style_guide_contents_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.ProvisioningClient", - "shortName": "ProvisioningClient" + "fullName": "google.cloud.apihub_v1.LintingServiceClient", + "shortName": "LintingServiceClient" }, - "fullName": "google.cloud.apihub_v1.ProvisioningClient.lookup_api_hub_instance", + "fullName": "google.cloud.apihub_v1.LintingServiceClient.get_style_guide", "method": { - "fullName": "google.cloud.apihub.v1.Provisioning.LookupApiHubInstance", + "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuide", "service": { - "fullName": "google.cloud.apihub.v1.Provisioning", - "shortName": "Provisioning" + "fullName": "google.cloud.apihub.v1.LintingService", + "shortName": "LintingService" }, - "shortName": "LookupApiHubInstance" + "shortName": "GetStyleGuide" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.LookupApiHubInstanceRequest" + 
"type": "google.cloud.apihub_v1.types.GetStyleGuideRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -8662,14 +3884,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.LookupApiHubInstanceResponse", - "shortName": "lookup_api_hub_instance" + "resultType": "google.cloud.apihub_v1.types.StyleGuide", + "shortName": "get_style_guide" }, - "description": "Sample for LookupApiHubInstance", - "file": "apihub_v1_generated_provisioning_lookup_api_hub_instance_sync.py", + "description": "Sample for GetStyleGuide", + "file": "apihub_v1_generated_linting_service_get_style_guide_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_Provisioning_LookupApiHubInstance_sync", + "regionTag": "apihub_v1_generated_LintingService_GetStyleGuide_sync", "segments": [ { "end": 51, @@ -8702,41 +3924,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_provisioning_lookup_api_hub_instance_sync.py" + "title": "apihub_v1_generated_linting_service_get_style_guide_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient", - "shortName": "RuntimeProjectAttachmentServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.LintingServiceClient", + "shortName": "LintingServiceClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient.create_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.LintingServiceClient.lint_spec", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.CreateRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.LintingService.LintSpec", "service": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", - "shortName": "RuntimeProjectAttachmentService" + "fullName": "google.cloud.apihub.v1.LintingService", + "shortName": "LintingService" }, - 
"shortName": "CreateRuntimeProjectAttachment" + "shortName": "LintSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateRuntimeProjectAttachmentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "runtime_project_attachment", - "type": "google.cloud.apihub_v1.types.RuntimeProjectAttachment" - }, - { - "name": "runtime_project_attachment_id", - "type": "str" + "type": "google.cloud.apihub_v1.types.LintSpecRequest" }, { "name": "retry", @@ -8751,22 +3960,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.RuntimeProjectAttachment", - "shortName": "create_runtime_project_attachment" + "shortName": "lint_spec" }, - "description": "Sample for CreateRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_async.py", + "description": "Sample for LintSpec", + "file": "apihub_v1_generated_linting_service_lint_spec_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_CreateRuntimeProjectAttachment_async", + "regionTag": "apihub_v1_generated_LintingService_LintSpec_sync", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -8776,55 +3984,49 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_async.py" + "title": "apihub_v1_generated_linting_service_lint_spec_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", - "shortName": 
"RuntimeProjectAttachmentServiceClient" + "fullName": "google.cloud.apihub_v1.LintingServiceClient", + "shortName": "LintingServiceClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.create_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.LintingServiceClient.update_style_guide", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.CreateRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.LintingService.UpdateStyleGuide", "service": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", - "shortName": "RuntimeProjectAttachmentService" + "fullName": "google.cloud.apihub.v1.LintingService", + "shortName": "LintingService" }, - "shortName": "CreateRuntimeProjectAttachment" + "shortName": "UpdateStyleGuide" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateRuntimeProjectAttachmentRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.apihub_v1.types.UpdateStyleGuideRequest" }, { - "name": "runtime_project_attachment", - "type": "google.cloud.apihub_v1.types.RuntimeProjectAttachment" + "name": "style_guide", + "type": "google.cloud.apihub_v1.types.StyleGuide" }, { - "name": "runtime_project_attachment_id", - "type": "str" + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -8839,14 +4041,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.RuntimeProjectAttachment", - "shortName": "create_runtime_project_attachment" + "resultType": "google.cloud.apihub_v1.types.StyleGuide", + "shortName": "update_style_guide" }, - "description": "Sample for CreateRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_sync.py", + "description": "Sample for UpdateStyleGuide", + "file": "apihub_v1_generated_linting_service_update_style_guide_sync.py", 
"language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_CreateRuntimeProjectAttachment_sync", + "regionTag": "apihub_v1_generated_LintingService_UpdateStyleGuide_sync", "segments": [ { "end": 56, @@ -8879,32 +4081,39 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_sync.py" + "title": "apihub_v1_generated_linting_service_update_style_guide_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient", - "shortName": "RuntimeProjectAttachmentServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ProvisioningClient", + "shortName": "ProvisioningClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient.delete_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.ProvisioningClient.create_api_hub_instance", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.DeleteRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.Provisioning.CreateApiHubInstance", "service": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", - "shortName": "RuntimeProjectAttachmentService" + "fullName": "google.cloud.apihub.v1.Provisioning", + "shortName": "Provisioning" }, - "shortName": "DeleteRuntimeProjectAttachment" + "shortName": "CreateApiHubInstance" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteRuntimeProjectAttachmentRequest" + "type": "google.cloud.apihub_v1.types.CreateApiHubInstanceRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "api_hub_instance", + "type": "google.cloud.apihub_v1.types.ApiHubInstance" + }, + { + "name": "api_hub_instance_id", "type": "str" }, { @@ -8920,21 +4129,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": 
"delete_runtime_project_attachment" + "resultType": "google.api_core.operation.Operation", + "shortName": "create_api_hub_instance" }, - "description": "Sample for DeleteRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_async.py", + "description": "Sample for CreateApiHubInstance", + "file": "apihub_v1_generated_provisioning_create_api_hub_instance_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_DeleteRuntimeProjectAttachment_async", + "regionTag": "apihub_v1_generated_Provisioning_CreateApiHubInstance_sync", "segments": [ { - "end": 49, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 59, "start": 27, "type": "SHORT" }, @@ -8944,41 +4154,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_async.py" + "title": "apihub_v1_generated_provisioning_create_api_hub_instance_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", - "shortName": "RuntimeProjectAttachmentServiceClient" + "fullName": "google.cloud.apihub_v1.ProvisioningClient", + "shortName": "ProvisioningClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.delete_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.ProvisioningClient.get_api_hub_instance", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.DeleteRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.Provisioning.GetApiHubInstance", "service": { - "fullName": 
"google.cloud.apihub.v1.RuntimeProjectAttachmentService", - "shortName": "RuntimeProjectAttachmentService" + "fullName": "google.cloud.apihub.v1.Provisioning", + "shortName": "Provisioning" }, - "shortName": "DeleteRuntimeProjectAttachment" + "shortName": "GetApiHubInstance" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteRuntimeProjectAttachmentRequest" + "type": "google.cloud.apihub_v1.types.GetApiHubInstanceRequest" }, { "name": "name", @@ -8997,21 +4209,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_runtime_project_attachment" + "resultType": "google.cloud.apihub_v1.types.ApiHubInstance", + "shortName": "get_api_hub_instance" }, - "description": "Sample for DeleteRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_sync.py", + "description": "Sample for GetApiHubInstance", + "file": "apihub_v1_generated_provisioning_get_api_hub_instance_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_DeleteRuntimeProjectAttachment_sync", + "regionTag": "apihub_v1_generated_Provisioning_GetApiHubInstance_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -9026,40 +4239,41 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_sync.py" + "title": "apihub_v1_generated_provisioning_get_api_hub_instance_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient", - "shortName": "RuntimeProjectAttachmentServiceAsyncClient" + "fullName": 
"google.cloud.apihub_v1.ProvisioningClient", + "shortName": "ProvisioningClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient.get_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.ProvisioningClient.lookup_api_hub_instance", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.GetRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.Provisioning.LookupApiHubInstance", "service": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", - "shortName": "RuntimeProjectAttachmentService" + "fullName": "google.cloud.apihub.v1.Provisioning", + "shortName": "Provisioning" }, - "shortName": "GetRuntimeProjectAttachment" + "shortName": "LookupApiHubInstance" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetRuntimeProjectAttachmentRequest" + "type": "google.cloud.apihub_v1.types.LookupApiHubInstanceRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -9075,14 +4289,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.RuntimeProjectAttachment", - "shortName": "get_runtime_project_attachment" + "resultType": "google.cloud.apihub_v1.types.LookupApiHubInstanceResponse", + "shortName": "lookup_api_hub_instance" }, - "description": "Sample for GetRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_async.py", + "description": "Sample for LookupApiHubInstance", + "file": "apihub_v1_generated_provisioning_lookup_api_hub_instance_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_GetRuntimeProjectAttachment_async", + "regionTag": "apihub_v1_generated_Provisioning_LookupApiHubInstance_sync", "segments": [ { "end": 51, @@ -9115,7 +4329,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_async.py" + "title": "apihub_v1_generated_provisioning_lookup_api_hub_instance_sync.py" }, { "canonical": true, @@ -9124,22 +4338,30 @@ "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", "shortName": "RuntimeProjectAttachmentServiceClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.get_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.create_runtime_project_attachment", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.GetRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.CreateRuntimeProjectAttachment", "service": { "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", "shortName": "RuntimeProjectAttachmentService" }, - "shortName": "GetRuntimeProjectAttachment" + "shortName": "CreateRuntimeProjectAttachment" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetRuntimeProjectAttachmentRequest" + "type": "google.cloud.apihub_v1.types.CreateRuntimeProjectAttachmentRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "runtime_project_attachment", + "type": "google.cloud.apihub_v1.types.RuntimeProjectAttachment" + }, + { + "name": "runtime_project_attachment_id", "type": "str" }, { @@ -9156,21 +4378,21 @@ } ], "resultType": "google.cloud.apihub_v1.types.RuntimeProjectAttachment", - "shortName": "get_runtime_project_attachment" + "shortName": "create_runtime_project_attachment" }, - "description": "Sample for GetRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_sync.py", + "description": "Sample for CreateRuntimeProjectAttachment", + "file": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_sync.py", 
"language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_GetRuntimeProjectAttachment_sync", + "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_CreateRuntimeProjectAttachment_sync", "segments": [ { - "end": 51, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 56, "start": 27, "type": "SHORT" }, @@ -9180,47 +4402,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_sync.py" + "title": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient", - "shortName": "RuntimeProjectAttachmentServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", + "shortName": "RuntimeProjectAttachmentServiceClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient.list_runtime_project_attachments", + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.delete_runtime_project_attachment", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments", + "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.DeleteRuntimeProjectAttachment", "service": { "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", "shortName": "RuntimeProjectAttachmentService" }, - "shortName": "ListRuntimeProjectAttachments" + "shortName": "DeleteRuntimeProjectAttachment" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsRequest" + "type": "google.cloud.apihub_v1.types.DeleteRuntimeProjectAttachmentRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -9236,22 +4457,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.runtime_project_attachment_service.pagers.ListRuntimeProjectAttachmentsAsyncPager", - "shortName": "list_runtime_project_attachments" + "shortName": "delete_runtime_project_attachment" }, - "description": "Sample for ListRuntimeProjectAttachments", - "file": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_async.py", + "description": "Sample for DeleteRuntimeProjectAttachment", + "file": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_ListRuntimeProjectAttachments_async", + "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_DeleteRuntimeProjectAttachment_sync", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, @@ -9266,17 +4486,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_async.py" + "title": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_sync.py" }, { "canonical": true, @@ -9285,22 +4503,22 @@ "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", "shortName": "RuntimeProjectAttachmentServiceClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.list_runtime_project_attachments", + "fullName": 
"google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.get_runtime_project_attachment", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments", + "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.GetRuntimeProjectAttachment", "service": { "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", "shortName": "RuntimeProjectAttachmentService" }, - "shortName": "ListRuntimeProjectAttachments" + "shortName": "GetRuntimeProjectAttachment" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsRequest" + "type": "google.cloud.apihub_v1.types.GetRuntimeProjectAttachmentRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -9316,22 +4534,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.runtime_project_attachment_service.pagers.ListRuntimeProjectAttachmentsPager", - "shortName": "list_runtime_project_attachments" + "resultType": "google.cloud.apihub_v1.types.RuntimeProjectAttachment", + "shortName": "get_runtime_project_attachment" }, - "description": "Sample for ListRuntimeProjectAttachments", - "file": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_sync.py", + "description": "Sample for GetRuntimeProjectAttachment", + "file": "apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_ListRuntimeProjectAttachments_sync", + "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_GetRuntimeProjectAttachment_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -9351,37 +4569,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": 
"RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_sync.py" + "title": "apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient", - "shortName": "RuntimeProjectAttachmentServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", + "shortName": "RuntimeProjectAttachmentServiceClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient.lookup_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.list_runtime_project_attachments", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.LookupRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments", "service": { "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", "shortName": "RuntimeProjectAttachmentService" }, - "shortName": "LookupRuntimeProjectAttachment" + "shortName": "ListRuntimeProjectAttachments" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.LookupRuntimeProjectAttachmentRequest" + "type": "google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -9397,22 +4614,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.LookupRuntimeProjectAttachmentResponse", - "shortName": "lookup_runtime_project_attachment" + "resultType": "google.cloud.apihub_v1.services.runtime_project_attachment_service.pagers.ListRuntimeProjectAttachmentsPager", + "shortName": "list_runtime_project_attachments" }, - "description": "Sample for LookupRuntimeProjectAttachment", - "file": 
"apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_async.py", + "description": "Sample for ListRuntimeProjectAttachments", + "file": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_LookupRuntimeProjectAttachment_async", + "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_ListRuntimeProjectAttachments_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -9432,12 +4649,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_async.py" + "title": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_sync.py" }, { "canonical": true, diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py index aaec1e3055f6..5f7c71ab5ffb 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py @@ -47,12 +47,7 @@ from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.cloud.apihub_v1.services.api_hub import ( - ApiHubAsyncClient, - ApiHubClient, - pagers, - transports, -) +from google.cloud.apihub_v1.services.api_hub import ApiHubClient, pagers, transports from google.cloud.apihub_v1.types import apihub_service, common_fields @@ -180,11 +175,6 @@ def test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubClient), ) -@mock.patch.object( - ApiHubAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - 
modify_default_endpoint_template(ApiHubAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -266,7 +256,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc"), (ApiHubClient, transports.ApiHubRestTransport, "rest"), ], ) @@ -346,8 +335,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (ApiHubClient, "grpc"), - (ApiHubAsyncClient, "grpc_asyncio"), (ApiHubClient, "rest"), ], ) @@ -372,8 +359,6 @@ def test_api_hub_client_from_service_account_info(client_class, transport_name): @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.ApiHubGrpcTransport, "grpc"), - (transports.ApiHubGrpcAsyncIOTransport, "grpc_asyncio"), (transports.ApiHubRestTransport, "rest"), ], ) @@ -396,8 +381,6 @@ def test_api_hub_client_service_account_always_use_jwt(transport_class, transpor @pytest.mark.parametrize( "client_class,transport_name", [ - (ApiHubClient, "grpc"), - (ApiHubAsyncClient, "grpc_asyncio"), (ApiHubClient, "rest"), ], ) @@ -429,20 +412,17 @@ def test_api_hub_client_from_service_account_file(client_class, transport_name): def test_api_hub_client_get_transport_class(): transport = ApiHubClient.get_transport_class() available_transports = [ - transports.ApiHubGrpcTransport, transports.ApiHubRestTransport, ] assert transport in available_transports - transport = ApiHubClient.get_transport_class("grpc") - assert transport == transports.ApiHubGrpcTransport + transport = ApiHubClient.get_transport_class("rest") + assert transport == transports.ApiHubRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc"), - (ApiHubAsyncClient, transports.ApiHubGrpcAsyncIOTransport, "grpc_asyncio"), (ApiHubClient, 
transports.ApiHubRestTransport, "rest"), ], ) @@ -451,11 +431,6 @@ def test_api_hub_client_get_transport_class(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubClient), ) -@mock.patch.object( - ApiHubAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubAsyncClient), -) def test_api_hub_client_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. with mock.patch.object(ApiHubClient, "get_transport_class") as gtc: @@ -587,20 +562,6 @@ def test_api_hub_client_client_options(client_class, transport_class, transport_ @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc", "true"), - ( - ApiHubAsyncClient, - transports.ApiHubGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc", "false"), - ( - ApiHubAsyncClient, - transports.ApiHubGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), (ApiHubClient, transports.ApiHubRestTransport, "rest", "true"), (ApiHubClient, transports.ApiHubRestTransport, "rest", "false"), ], @@ -610,11 +571,6 @@ def test_api_hub_client_client_options(client_class, transport_class, transport_ "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubClient), ) -@mock.patch.object( - ApiHubAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_api_hub_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ -718,13 +674,10 @@ def test_api_hub_client_mtls_env_auto( ) -@pytest.mark.parametrize("client_class", [ApiHubClient, ApiHubAsyncClient]) +@pytest.mark.parametrize("client_class", [ApiHubClient]) @mock.patch.object( ApiHubClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ApiHubClient) ) 
-@mock.patch.object( - ApiHubAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ApiHubAsyncClient) -) def test_api_hub_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() @@ -816,17 +769,12 @@ def test_api_hub_client_get_mtls_endpoint_and_cert_source(client_class): ) -@pytest.mark.parametrize("client_class", [ApiHubClient, ApiHubAsyncClient]) +@pytest.mark.parametrize("client_class", [ApiHubClient]) @mock.patch.object( ApiHubClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubClient), ) -@mock.patch.object( - ApiHubAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubAsyncClient), -) def test_api_hub_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" @@ -903,8 +851,6 @@ def test_api_hub_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc"), - (ApiHubAsyncClient, transports.ApiHubGrpcAsyncIOTransport, "grpc_asyncio"), (ApiHubClient, transports.ApiHubRestTransport, "rest"), ], ) @@ -936,13 +882,6 @@ def test_api_hub_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc", grpc_helpers), - ( - ApiHubAsyncClient, - transports.ApiHubGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), (ApiHubClient, transports.ApiHubRestTransport, "rest", None), ], ) @@ -970,89 +909,6 @@ def test_api_hub_client_client_options_credentials_file( ) -def test_api_hub_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.api_hub.transports.ApiHubGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = ApiHubClient(client_options={"api_endpoint": "squid.clam.whelk"}) - 
grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc", grpc_helpers), - ( - ApiHubAsyncClient, - transports.ApiHubGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_api_hub_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1060,33 +916,135 @@ def test_api_hub_client_create_channel_credentials_file( dict, ], ) -def test_create_api(request_type, transport: str = "grpc"): +def test_create_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["api"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "owner": {"display_name": "display_name_value", "email": "email_value"}, + "versions": ["versions_value1", "versions_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "target_user": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "team": {}, + "business_unit": {}, + "maturity_level": {}, + "attributes": {}, + "api_style": {}, + "selected_version": "selected_version_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateApiRequest.meta.fields["api"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["api"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["api"][field])): + del request_init["api"][field][i][subfield] + else: + del 
request_init["api"][field][subfield] + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Api( name="name_value", display_name="display_name_value", description="description_value", versions=["versions_value"], selected_version="selected_version_value", ) - response = client.create_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_api(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Api) @@ -1097,62 +1055,13 @@ def test_create_api(request_type, transport: str = "grpc"): assert response.selected_version == "selected_version_value" -def test_create_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateApiRequest() - - -def test_create_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateApiRequest( - parent="parent_value", - api_id="api_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateApiRequest( - parent="parent_value", - api_id="api_id_value", - ) - - -def test_create_api_use_cached_wrapped_rpc(): +def test_create_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1168,6 +1077,7 @@ def test_create_api_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[client._transport.create_api] = mock_rpc + request = {} client.create_api(request) @@ -1181,272 +1091,228 @@ def test_create_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - ) - response = await client.create_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateApiRequest() - +def test_create_api_rest_required_fields(request_type=apihub_service.CreateApiRequest): + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_create_api_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Should wrap all 
calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # verify fields with default values are dropped - # Ensure method has been cached - assert ( - client._client._transport.create_api - in client._client._transport._wrapped_methods - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_api - ] = mock_rpc + # verify required fields with default values are now present - request = {} - await client.create_api(request) + jsonified_request["parent"] = "parent_value" - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("api_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_create_api_async( - transport: str = "grpc_asyncio", request_type=apihub_service.CreateApiRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Api() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - ) - response = await client.create_api(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateApiRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Api) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.versions == ["versions_value"] - assert response.selected_version == "selected_version_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_api(request) -@pytest.mark.asyncio -async def test_create_api_async_from_dict(): - await test_create_api_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_api_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.CreateApiRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - call.return_value = common_fields.Api() - client.create_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_api._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("apiId",)) + & set( + ( + "parent", + "api", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_api_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateApiRequest.pb( + apihub_service.CreateApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.CreateApiRequest() - - request.parent = "parent_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Api.to_json(common_fields.Api()) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Api()) - await client.create_api(request) + request = apihub_service.CreateApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Api() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.create_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -def test_create_api_flattened(): +def test_create_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateApiRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.create_api( - parent="parent_value", - api=common_fields.Api(name="name_value"), - api_id="api_id_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].api - mock_val = common_fields.Api(name="name_value") - assert arg == mock_val - arg = args[0].api_id - mock_val = "api_id_value" - assert arg == mock_val - - -def test_create_api_flattened_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_api( - apihub_service.CreateApiRequest(), - parent="parent_value", - api=common_fields.Api(name="name_value"), - api_id="api_id_value", - ) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_api(request) -@pytest.mark.asyncio -async def test_create_api_flattened_async(): - client = ApiHubAsyncClient( +def test_create_api_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Api() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Api() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Api()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_api( + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", api=common_fields.Api(name="name_value"), api_id="api_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].api - mock_val = common_fields.Api(name="name_value") - assert arg == mock_val - arg = args[0].api_id - mock_val = "api_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_api_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/apis" % client.transport._host, + args[1], + ) + + +def test_create_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_api( + client.create_api( apihub_service.CreateApiRequest(), parent="parent_value", api=common_fields.Api(name="name_value"), @@ -1454,6 +1320,12 @@ async def test_create_api_flattened_error_async(): ) +def test_create_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1461,33 +1333,37 @@ async def test_create_api_flattened_error_async(): dict, ], ) -def test_get_api(request_type, transport: str = "grpc"): +def test_get_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Api( name="name_value", display_name="display_name_value", description="description_value", versions=["versions_value"], selected_version="selected_version_value", ) - response = client.get_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Api) @@ -1498,60 +1374,13 @@ def test_get_api(request_type, transport: str = "grpc"): assert response.selected_version == "selected_version_value" -def test_get_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetApiRequest() - - -def test_get_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetApiRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetApiRequest( - name="name_value", - ) - - -def test_get_api_use_cached_wrapped_rpc(): +def test_get_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1567,6 +1396,7 @@ def test_get_api_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_api] = mock_rpc + request = {} client.get_api(request) @@ -1580,259 +1410,224 @@ def test_get_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - ) - response = await client.get_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetApiRequest() - - -@pytest.mark.asyncio -async def test_get_api_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() +def test_get_api_rest_required_fields(request_type=apihub_service.GetApiRequest): + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_api - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_api - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_api(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was 
called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_api(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_api_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetApiRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Api() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - ) - response = await client.get_api(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetApiRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Api) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.versions == ["versions_value"] - assert response.selected_version == "selected_version_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api(request) -@pytest.mark.asyncio -async def test_get_api_async_from_dict(): - await test_get_api_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_api_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.GetApiRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - call.return_value = common_fields.Api() - client.get_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_api._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_api_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetApiRequest.pb(apihub_service.GetApiRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.GetApiRequest() - - request.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Api.to_json(common_fields.Api()) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Api()) - await client.get_api(request) + request = apihub_service.GetApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Api() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.get_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -def test_get_api_flattened(): +def test_get_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetApiRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_api( - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_api(request) -def test_get_api_flattened_error(): +def test_get_api_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_api( - apihub_service.GetApiRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_get_api_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Api() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Api() + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/apis/sample3"} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Api()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_api( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*}" % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_api_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_api( + client.get_api( apihub_service.GetApiRequest(), name="name_value", ) +def test_get_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1840,93 +1635,46 @@ async def test_get_api_flattened_error_async(): dict, ], ) -def test_list_apis(request_type, transport: str = "grpc"): +def test_list_apis_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListApisResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApisResponse( next_page_token="next_page_token_value", ) - response = client.list_apis(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListApisRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_apis(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListApisPager) assert response.next_page_token == "next_page_token_value" -def test_list_apis_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_apis() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListApisRequest() - - -def test_list_apis_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.ListApisRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_apis(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListApisRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_apis_use_cached_wrapped_rpc(): +def test_list_apis_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1942,6 +1690,7 @@ def test_list_apis_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.list_apis] = mock_rpc + request = {} client.list_apis(request) @@ -1955,262 +1704,250 @@ def test_list_apis_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_apis_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_list_apis_rest_required_fields(request_type=apihub_service.ListApisRequest): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApisResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_apis() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListApisRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_apis._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_list_apis_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" - # Ensure method has been cached - assert ( - client._client._transport.list_apis - in 
client._client._transport._wrapped_methods + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_apis._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", ) + ) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_apis - ] = mock_rpc - - request = {} - await client.list_apis(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_apis(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_apis_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListApisRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApisResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApisResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_apis(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListApisRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = apihub_service.ListApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListApisAsyncPager) - assert response.next_page_token == "next_page_token_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_apis(request) -@pytest.mark.asyncio -async def test_list_apis_async_from_dict(): - await test_list_apis_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_apis_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_apis_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListApisRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - call.return_value = apihub_service.ListApisResponse() - client.list_apis(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.list_apis._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_apis_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_apis_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_apis" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_apis" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListApisRequest.pb(apihub_service.ListApisRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListApisRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListApisResponse.to_json( apihub_service.ListApisResponse() ) - await client.list_apis(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_list_apis_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.ListApisRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListApisResponse() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListApisResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.list_apis( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_apis_flattened_error(): +def test_list_apis_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListApisRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_apis( - apihub_service.ListApisRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_apis(request) -@pytest.mark.asyncio -async def test_list_apis_flattened_async(): - client = ApiHubAsyncClient( +def test_list_apis_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListApisResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = apihub_service.ListApisResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApisResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_apis( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_apis(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/apis" % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_apis_flattened_error_async(): - client = ApiHubAsyncClient( +def test_list_apis_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_apis( + client.list_apis( apihub_service.ListApisRequest(), parent="parent_value", ) -def test_list_apis_pager(transport_name: str = "grpc"): +def test_list_apis_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListApisResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + apihub_service.ListApisResponse( apis=[ common_fields.Api(), common_fields.Api(), @@ -2234,162 +1971,27 @@ def test_list_apis_pager(transport_name: str = "grpc"): common_fields.Api(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_apis(request={}, retry=retry, timeout=timeout) + # Wrap the values into proper Response objs + response = tuple(apihub_service.ListApisResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert 
pager._timeout == timeout + pager = client.list_apis(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.Api) for i in results) - -def test_list_apis_pages(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - common_fields.Api(), - ], - next_page_token="abc", - ), - apihub_service.ListApisResponse( - apis=[], - next_page_token="def", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - ], - next_page_token="ghi", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - ], - ), - RuntimeError, - ) - pages = list(client.list_apis(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_apis_async_pager(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_apis), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - common_fields.Api(), - ], - next_page_token="abc", - ), - apihub_service.ListApisResponse( - apis=[], - next_page_token="def", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - ], - next_page_token="ghi", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_apis( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.Api) for i in responses) - - -@pytest.mark.asyncio -async def test_list_apis_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_apis), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - common_fields.Api(), - ], - next_page_token="abc", - ), - apihub_service.ListApisResponse( - apis=[], - next_page_token="def", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - ], - next_page_token="ghi", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_apis(request={}) - ).pages: - pages.append(page_) + pages = list(client.list_apis(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2401,33 +2003,135 @@ async def test_list_apis_async_pages(): dict, ], ) -def test_update_api(request_type, transport: str = "grpc"): +def test_update_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"api": {"name": "projects/sample1/locations/sample2/apis/sample3"}} + request_init["api"] = { + "name": "projects/sample1/locations/sample2/apis/sample3", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "owner": {"display_name": "display_name_value", "email": "email_value"}, + "versions": ["versions_value1", "versions_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "target_user": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "team": {}, + "business_unit": {}, + "maturity_level": {}, + "attributes": {}, + "api_style": {}, + "selected_version": "selected_version_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateApiRequest.meta.fields["api"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["api"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Api( + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["api"][field])): + del request_init["api"][field][i][subfield] + else: + del request_init["api"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Api( name="name_value", display_name="display_name_value", description="description_value", versions=["versions_value"], selected_version="selected_version_value", ) - response = client.update_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_api(request) # Establish that the response is the type that we expect. 
assert isinstance(response, common_fields.Api) @@ -2438,61 +2142,18 @@ def test_update_api(request_type, transport: str = "grpc"): assert response.selected_version == "selected_version_value" -def test_update_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. +def test_update_api_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - client.update_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateApiRequest() - -def test_update_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateApiRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateApiRequest() - - -def test_update_api_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached assert client._transport.update_api in client._transport._wrapped_methods @@ -2503,6 +2164,7 @@ def test_update_api_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.update_api] = mock_rpc + request = {} client.update_api(request) @@ -2516,210 +2178,218 @@ def test_update_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - ) - response = await client.update_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateApiRequest() - +def test_update_api_rest_required_fields(request_type=apihub_service.UpdateApiRequest): + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_update_api_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # verify fields with default values are dropped - # Ensure method has been cached - assert ( - client._client._transport.update_api - in client._client._transport._wrapped_methods - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_api - ] = mock_rpc + # verify required fields with default values are now present - request = {} - await client.update_api(request) + 
unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with non-default values are left alone - await client.update_api(request) + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Designate an appropriate value for the returned response. + return_value = common_fields.Api() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -@pytest.mark.asyncio -async def test_update_api_async( - transport: str = "grpc_asyncio", request_type=apihub_service.UpdateApiRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - ) - response = await client.update_api(request) + response = client.update_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateApiRequest() - assert args[0] == request + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Api) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.versions == ["versions_value"] - assert response.selected_version == "selected_version_value" +def test_update_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) -@pytest.mark.asyncio -async def test_update_api_async_from_dict(): - await test_update_api_async(request_type=dict) + unset_fields = transport.update_api._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "api", + "updateMask", + ) + ) + ) -def test_update_api_field_headers(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateApiRequest.pb( + apihub_service.UpdateApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.UpdateApiRequest() - - request.api.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Api.to_json(common_fields.Api()) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - call.return_value = common_fields.Api() - client.update_api(request) + request = apihub_service.UpdateApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Api() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.update_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "api.name=name_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -@pytest.mark.asyncio -async def test_update_api_field_headers_async(): - client = ApiHubAsyncClient( +def test_update_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateApiRequest +): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateApiRequest() - - request.api.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_api), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Api()) - await client.update_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + # send a request that will satisfy transcoding + request_init = {"api": {"name": "projects/sample1/locations/sample2/apis/sample3"}} + request = request_type(**request_init) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "api.name=name_value", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_api(request) -def test_update_api_flattened(): +def test_update_api_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_api( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Api() + + # get arguments that satisfy an http rule for this method + sample_request = { + "api": {"name": "projects/sample1/locations/sample2/apis/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( api=common_fields.Api(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].api - mock_val = common_fields.Api(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{api.name=projects/*/locations/*/apis/*}" % client.transport._host, + args[1], + ) -def test_update_api_flattened_error(): +def test_update_api_rest_flattened_error(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -2732,140 +2402,54 @@ def test_update_api_flattened_error(): ) -@pytest.mark.asyncio -async def test_update_api_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock 
the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Api()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_api( - api=common_fields.Api(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].api - mock_val = common_fields.Api(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_api_flattened_error_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.DeleteApiRequest, + dict, + ], +) +def test_delete_api_rest(request_type): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_api( - apihub_service.UpdateApiRequest(), - api=common_fields.Api(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteApiRequest, - dict, - ], -) -def test_delete_api(request_type, transport: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value response = client.delete_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteApiRequest() - assert args[0] == request - # Establish that the response is the type that we expect. assert response is None -def test_delete_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteApiRequest() - - -def test_delete_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteApiRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteApiRequest( - name="name_value", - ) - - -def test_delete_api_use_cached_wrapped_rpc(): +def test_delete_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2881,6 +2465,7 @@ def test_delete_api_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.delete_api] = mock_rpc + request = {} client.delete_api(request) @@ -2894,185 +2479,200 @@ def test_delete_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteApiRequest() - - -@pytest.mark.asyncio -async def test_delete_api_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() +def test_delete_api_rest_required_fields(request_type=apihub_service.DeleteApiRequest): + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.delete_api - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_api - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.delete_api(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.delete_api(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_delete_api_async( - transport: str = "grpc_asyncio", request_type=apihub_service.DeleteApiRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_api(request) + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteApiRequest() - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the response is the type that we expect. - assert response is None + response = client.delete_api(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_delete_api_async_from_dict(): - await test_delete_api_async(request_type=dict) +def test_delete_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + unset_fields = transport.delete_api._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) -def test_delete_api_field_headers(): - client = ApiHubClient( + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with 
mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_api" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteApiRequest.pb( + apihub_service.DeleteApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteApiRequest() - - request.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - call.return_value = None - client.delete_api(request) + request = apihub_service.DeleteApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.delete_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + pre.assert_called_once() -@pytest.mark.asyncio -async def test_delete_api_field_headers_async(): - client = ApiHubAsyncClient( +def test_delete_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteApiRequest +): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = apihub_service.DeleteApiRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_api(request) -def test_delete_api_flattened(): +def test_delete_api_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_api( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/apis/sample3"} + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*}" % client.transport._host, + args[1], + ) -def test_delete_api_flattened_error(): +def test_delete_api_rest_flattened_error(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -3084,47 +2684,11 @@ def test_delete_api_flattened_error(): ) -@pytest.mark.asyncio -async def test_delete_api_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_api( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_delete_api_flattened_error_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_api( - apihub_service.DeleteApiRequest(), - name="name_value", - ) - @pytest.mark.parametrize( "request_type", @@ -3133,20 +2697,118 @@ async def test_delete_api_flattened_error_async(): dict, ], ) -def test_create_version(request_type, transport: str = "grpc"): +def test_create_version_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + request_init["version"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "specs": ["specs_value1", "specs_value2"], + "api_operations": ["api_operations_value1", "api_operations_value2"], + "definitions": ["definitions_value1", "definitions_value2"], + "deployments": ["deployments_value1", "deployments_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "lifecycle": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "compliance": {}, + "accreditation": {}, + "attributes": {}, + "selected_deployment": "selected_deployment_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateVersionRequest.meta.fields["version"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["version"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Version( + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["version"][field])): + del request_init["version"][field][i][subfield] + else: + del request_init["version"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version( name="name_value", display_name="display_name_value", description="description_value", @@ -3156,13 +2818,17 @@ def test_create_version(request_type, transport: str = "grpc"): deployments=["deployments_value"], selected_deployment="selected_deployment_value", ) - response = client.create_version(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateVersionRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_version(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Version) @@ -3176,62 +2842,13 @@ def test_create_version(request_type, transport: str = "grpc"): assert response.selected_deployment == "selected_deployment_value" -def test_create_version_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateVersionRequest() - - -def test_create_version_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateVersionRequest( - parent="parent_value", - version_id="version_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_version(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateVersionRequest( - parent="parent_value", - version_id="version_id_value", - ) - - -def test_create_version_use_cached_wrapped_rpc(): +def test_create_version_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -3247,6 +2864,7 @@ def test_create_version_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.create_version] = mock_rpc + request = {} client.create_version(request) @@ -3260,287 +2878,233 @@ def test_create_version_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_version_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - ) - response = await client.create_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateVersionRequest() - - -@pytest.mark.asyncio -async def test_create_version_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_create_version_rest_required_fields( + request_type=apihub_service.CreateVersionRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.create_version - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - 
mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_version - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.create_version(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.create_version(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_version._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("version_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_create_version_async( - transport: str = "grpc_asyncio", request_type=apihub_service.CreateVersionRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - ) - response = await client.create_version(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateVersionRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Version) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.specs == ["specs_value"] - assert response.api_operations == ["api_operations_value"] - assert response.definitions == ["definitions_value"] - assert response.deployments == ["deployments_value"] - assert response.selected_deployment == "selected_deployment_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_version(request) -@pytest.mark.asyncio -async def test_create_version_async_from_dict(): - await test_create_version_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_version_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_version_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateVersionRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - call.return_value = common_fields.Version() - client.create_version(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_version._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("versionId",)) + & set( + ( + "parent", + "version", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_version_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_version_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_version" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_version" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateVersionRequest.pb( + apihub_service.CreateVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateVersionRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_version), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Version.to_json( common_fields.Version() ) - await client.create_version(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - -def test_create_version_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.CreateVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Version() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.create_version( - parent="parent_value", - version=common_fields.Version(name="name_value"), - version_id="version_id_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].version - mock_val = common_fields.Version(name="name_value") - assert arg == mock_val - arg = args[0].version_id - mock_val = "version_id_value" - assert arg == mock_val - - -def test_create_version_flattened_error(): + pre.assert_called_once() + post.assert_called_once() + + +def test_create_version_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateVersionRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_version( - apihub_service.CreateVersionRequest(), - parent="parent_value", - version=common_fields.Version(name="name_value"), - version_id="version_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_version(request) -@pytest.mark.asyncio -async def test_create_version_flattened_async(): - client = ApiHubAsyncClient( +def test_create_version_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_version( + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", version=common_fields.Version(name="name_value"), version_id="version_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_version(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].version - mock_val = common_fields.Version(name="name_value") - assert arg == mock_val - arg = args[0].version_id - mock_val = "version_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_version_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*}/versions" + % client.transport._host, + args[1], + ) + + +def test_create_version_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_version( + client.create_version( apihub_service.CreateVersionRequest(), parent="parent_value", version=common_fields.Version(name="name_value"), @@ -3548,6 +3112,12 @@ async def test_create_version_flattened_error_async(): ) +def test_create_version_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3555,20 +3125,22 @@ async def test_create_version_flattened_error_async(): dict, ], ) -def test_get_version(request_type, transport: str = "grpc"): +def test_get_version_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version( name="name_value", display_name="display_name_value", description="description_value", @@ -3578,13 +3150,17 @@ def test_get_version(request_type, transport: str = "grpc"): deployments=["deployments_value"], selected_deployment="selected_deployment_value", ) - response = client.get_version(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetVersionRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_version(request) # Establish that the response is the type that we expect. 
assert isinstance(response, common_fields.Version) @@ -3598,60 +3174,13 @@ def test_get_version(request_type, transport: str = "grpc"): assert response.selected_deployment == "selected_deployment_value" -def test_get_version_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetVersionRequest() - - -def test_get_version_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetVersionRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_version(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetVersionRequest( - name="name_value", - ) - - -def test_get_version_use_cached_wrapped_rpc(): +def test_get_version_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -3667,6 +3196,7 @@ def test_get_version_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_version] = mock_rpc + request = {} client.get_version(request) @@ -3680,274 +3210,235 @@ def test_get_version_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_version_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - ) - response = await client.get_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetVersionRequest() - - -@pytest.mark.asyncio -async def test_get_version_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_version_rest_required_fields( + request_type=apihub_service.GetVersionRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_version - in client._client._transport._wrapped_methods - ) + transport_class = transports.ApiHubRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_version - ] = mock_rpc + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.get_version(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.get_version(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + jsonified_request["name"] = "name_value" + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_get_version_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetVersionRequest -): - client = ApiHubAsyncClient( + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - ) - response = await client.get_version(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetVersionRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Version) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.specs == ["specs_value"] - assert response.api_operations == ["api_operations_value"] - assert response.definitions == ["definitions_value"] - assert response.deployments == ["deployments_value"] - assert response.selected_deployment == "selected_deployment_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_version(request) -@pytest.mark.asyncio -async def test_get_version_async_from_dict(): - await test_get_version_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_version_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_version_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetVersionRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - call.return_value = common_fields.Version() - client.get_version(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_version_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_version_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_version" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_version" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetVersionRequest.pb( + apihub_service.GetVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetVersionRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Version.to_json( common_fields.Version() ) - await client.get_version(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - -def test_get_version_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.GetVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Version() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_version( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_version_flattened_error(): +def test_get_version_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetVersionRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_version( - apihub_service.GetVersionRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_version(request) -@pytest.mark.asyncio -async def test_get_version_flattened_async(): - client = ApiHubAsyncClient( +def test_get_version_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_version( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_version(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_version_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_version_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_version( + client.get_version( apihub_service.GetVersionRequest(), name="name_value", ) +def test_get_version_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3955,93 +3446,46 @@ async def test_get_version_flattened_error_async(): dict, ], ) -def test_list_versions(request_type, transport: str = "grpc"): +def test_list_versions_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListVersionsResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListVersionsResponse( next_page_token="next_page_token_value", ) - response = client.list_versions(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListVersionsRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_versions(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListVersionsPager) assert response.next_page_token == "next_page_token_value" -def test_list_versions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_versions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListVersionsRequest() - - -def test_list_versions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.ListVersionsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_versions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListVersionsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_versions_use_cached_wrapped_rpc(): +def test_list_versions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -4057,6 +3501,7 @@ def test_list_versions_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.list_versions] = mock_rpc + request = {} client.list_versions(request) @@ -4070,263 +3515,254 @@ def test_list_versions_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_versions_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_list_versions_rest_required_fields( + request_type=apihub_service.ListVersionsRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListVersionsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_versions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListVersionsRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_versions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_list_versions_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" - # Ensure method has been cached - assert ( - 
client._client._transport.list_versions - in client._client._transport._wrapped_methods + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_versions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", ) + ) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_versions - ] = mock_rpc - - request = {} - await client.list_versions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_versions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_versions_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListVersionsRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListVersionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListVersionsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_versions(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListVersionsRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = apihub_service.ListVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListVersionsAsyncPager) - assert response.next_page_token == "next_page_token_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_versions(request) -@pytest.mark.asyncio -async def test_list_versions_async_from_dict(): - await test_list_versions_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_versions_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_versions_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListVersionsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - call.return_value = apihub_service.ListVersionsResponse() - client.list_versions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.list_versions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_versions_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_versions_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_versions" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_versions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListVersionsRequest.pb( + apihub_service.ListVersionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListVersionsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListVersionsResponse.to_json( apihub_service.ListVersionsResponse() ) - await client.list_versions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + request = apihub_service.ListVersionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListVersionsResponse() -def test_list_versions_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListVersionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.list_versions( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_versions_flattened_error(): +def test_list_versions_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListVersionsRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_versions( - apihub_service.ListVersionsRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_versions(request) -@pytest.mark.asyncio -async def test_list_versions_flattened_async(): - client = ApiHubAsyncClient( +def test_list_versions_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListVersionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = apihub_service.ListVersionsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListVersionsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_versions( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_versions(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*}/versions" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_versions_flattened_error_async(): - client = ApiHubAsyncClient( +def test_list_versions_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_versions( + client.list_versions( apihub_service.ListVersionsRequest(), parent="parent_value", ) -def test_list_versions_pager(transport_name: str = "grpc"): +def test_list_versions_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListVersionsResponse( versions=[ common_fields.Version(), @@ -4351,164 +3787,31 @@ def test_list_versions_pager(transport_name: str = "grpc"): common_fields.Version(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListVersionsResponse.to_json(x) for x in response ) - pager = client.list_versions(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - assert pager._metadata == expected_metadata - assert pager._retry == retry - 
assert pager._timeout == timeout + pager = client.list_versions(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.Version) for i in results) - -def test_list_versions_pages(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - common_fields.Version(), - ], - next_page_token="abc", - ), - apihub_service.ListVersionsResponse( - versions=[], - next_page_token="def", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - ], - next_page_token="ghi", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - ], - ), - RuntimeError, - ) - pages = list(client.list_versions(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_versions_async_pager(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_versions), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - common_fields.Version(), - ], - next_page_token="abc", - ), - apihub_service.ListVersionsResponse( - versions=[], - next_page_token="def", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - ], - next_page_token="ghi", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_versions( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.Version) for i in responses) - - -@pytest.mark.asyncio -async def test_list_versions_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_versions), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - common_fields.Version(), - ], - next_page_token="abc", - ), - apihub_service.ListVersionsResponse( - versions=[], - next_page_token="def", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - ], - next_page_token="ghi", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_versions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + pages = list(client.list_versions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( @@ -4518,20 +3821,122 @@ async def test_list_versions_async_pages(): dict, ], ) -def test_update_version(request_type, transport: str = "grpc"): +def test_update_version_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "version": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + } + request_init["version"] = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "specs": ["specs_value1", "specs_value2"], + "api_operations": ["api_operations_value1", "api_operations_value2"], + "definitions": ["definitions_value1", "definitions_value2"], + "deployments": ["deployments_value1", "deployments_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "lifecycle": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "compliance": {}, + "accreditation": {}, + "attributes": {}, + "selected_deployment": "selected_deployment_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateVersionRequest.meta.fields["version"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["version"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["version"][field])): + del request_init["version"][field][i][subfield] + else: + del 
request_init["version"][field][subfield] + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version( name="name_value", display_name="display_name_value", description="description_value", @@ -4541,13 +3946,17 @@ def test_update_version(request_type, transport: str = "grpc"): deployments=["deployments_value"], selected_deployment="selected_deployment_value", ) - response = client.update_version(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateVersionRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_version(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Version) @@ -4561,56 +3970,13 @@ def test_update_version(request_type, transport: str = "grpc"): assert response.selected_deployment == "selected_deployment_value" -def test_update_version_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateVersionRequest() - - -def test_update_version_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateVersionRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.update_version(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateVersionRequest() - - -def test_update_version_use_cached_wrapped_rpc(): +def test_update_version_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -4626,6 +3992,7 @@ def test_update_version_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.update_version] = mock_rpc + request = {} client.update_version(request) @@ -4639,284 +4006,247 @@ def test_update_version_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_version_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - ) - response = await client.update_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateVersionRequest() - - -@pytest.mark.asyncio -async def test_update_version_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_update_version_rest_required_fields( + request_type=apihub_service.UpdateVersionRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + transport_class = transports.ApiHubRestTransport - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_version - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_version - ] = mock_rpc + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.update_version(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.update_version(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_version._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -@pytest.mark.asyncio -async def test_update_version_async( - transport: str = "grpc_asyncio", request_type=apihub_service.UpdateVersionRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - ) - response = await client.update_version(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateVersionRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Version) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.specs == ["specs_value"] - assert response.api_operations == ["api_operations_value"] - assert response.definitions == ["definitions_value"] - assert response.deployments == ["deployments_value"] - assert response.selected_deployment == "selected_deployment_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_version(request) -@pytest.mark.asyncio -async def test_update_version_async_from_dict(): - await test_update_version_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_update_version_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_version_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateVersionRequest() - - request.version.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - call.return_value = common_fields.Version() - client.update_version(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "version.name=name_value", - ) in kw["metadata"] + unset_fields = transport.update_version._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "version", + "updateMask", + ) + ) + ) -@pytest.mark.asyncio -async def test_update_version_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_version_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_version" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_version" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateVersionRequest.pb( + apihub_service.UpdateVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateVersionRequest() - - request.version.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_version), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Version.to_json( common_fields.Version() ) - await client.update_version(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "version.name=name_value", - ) in kw["metadata"] - - -def test_update_version_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.UpdateVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Version() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.update_version( - version=common_fields.Version(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].version - mock_val = common_fields.Version(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_update_version_flattened_error(): +def test_update_version_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateVersionRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_version( - apihub_service.UpdateVersionRequest(), - version=common_fields.Version(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # send a request that will satisfy transcoding + request_init = { + "version": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_version(request) -@pytest.mark.asyncio -async def test_update_version_flattened_async(): - client = ApiHubAsyncClient( +def test_update_version_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_version( + # get arguments that satisfy an http rule for this method + sample_request = { + "version": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( version=common_fields.Version(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_version(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].version - mock_val = common_fields.Version(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_version_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{version.name=projects/*/locations/*/apis/*/versions/*}" + % client.transport._host, + args[1], + ) + + +def test_update_version_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_version( + client.update_version( apihub_service.UpdateVersionRequest(), version=common_fields.Version(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) +def test_update_version_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -4924,86 +4254,43 @@ async def test_update_version_flattened_error_async(): dict, ], ) -def test_delete_version(request_type, transport: str = "grpc"): +def test_delete_version_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_version(request) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteVersionRequest() - assert args[0] == request + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_version_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteVersionRequest() - - -def test_delete_version_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteVersionRequest( - name="name_value", - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_version(request) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_version(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteVersionRequest( - name="name_value", - ) + # Establish that the response is the type that we expect. + assert response is None -def test_delete_version_use_cached_wrapped_rpc(): +def test_delete_version_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -5019,6 +4306,7 @@ def test_delete_version_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[client._transport.delete_version] = mock_rpc + request = {} client.delete_version(request) @@ -5032,240 +4320,224 @@ def test_delete_version_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_version_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteVersionRequest() - - -@pytest.mark.asyncio -async def test_delete_version_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_delete_version_rest_required_fields( + request_type=apihub_service.DeleteVersionRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.delete_version - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = 
json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_version - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.delete_version(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.delete_version(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_version._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_delete_version_async( - transport: str = "grpc_asyncio", request_type=apihub_service.DeleteVersionRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_version(request) + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteVersionRequest() - assert args[0] == request + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Establish that the response is the type that we expect. 
- assert response is None + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_version(request) -@pytest.mark.asyncio -async def test_delete_version_async_from_dict(): - await test_delete_version_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_delete_version_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_version_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteVersionRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - call.return_value = None - client.delete_version(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.delete_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) -@pytest.mark.asyncio -async def test_delete_version_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_version_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_version" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteVersionRequest.pb( + apihub_service.DeleteVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteVersionRequest() - - request.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_version(request) + request = apihub_service.DeleteVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.delete_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + pre.assert_called_once() -def test_delete_version_flattened(): +def test_delete_version_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteVersionRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_version( - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_version(request) -def test_delete_version_flattened_error(): +def test_delete_version_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_version( - apihub_service.DeleteVersionRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_version_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.delete_version( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_version(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_version_flattened_error_async(): - client = ApiHubAsyncClient( +def test_delete_version_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_version( + client.delete_version( apihub_service.DeleteVersionRequest(), name="name_value", ) +def test_delete_version_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -5273,32 +4545,155 @@ async def test_delete_version_flattened_error_async(): dict, ], ) -def test_create_spec(request_type, transport: str = "grpc"): +def test_create_spec_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Spec( - name="name_value", + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request_init["spec"] = { + "name": "name_value", + "display_name": "display_name_value", + "spec_type": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, + "details": { + "open_api_spec_details": { + "format_": 1, + "version": "version_value", + "owner": {"display_name": "display_name_value", "email": "email_value"}, + }, + "description": "description_value", + }, + "source_uri": "source_uri_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "lint_response": { + "issues": [ + { + "code": "code_value", + "path": ["path_value1", "path_value2"], + "message": "message_value", + "severity": 1, + "range_": {"start": {"line": 424, "character": 941}, "end": {}}, + } + ], + "summary": [{"severity": 1, "count": 553}], + "state": 1, + "source": "source_value", + "linter": 1, + "create_time": {}, + }, + "attributes": {}, + "documentation": {"external_uri": "external_uri_value"}, + "parsing_mode": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateSpecRequest.meta.fields["spec"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["spec"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# 
pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["spec"][field])): + del request_init["spec"][field][i][subfield] + else: + del request_init["spec"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec( + name="name_value", display_name="display_name_value", source_uri="source_uri_value", parsing_mode=common_fields.Spec.ParsingMode.RELAXED, ) - response = client.create_spec(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateSpecRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_spec(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Spec) @@ -5308,62 +4703,13 @@ def test_create_spec(request_type, transport: str = "grpc"): assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED -def test_create_spec_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateSpecRequest() - - -def test_create_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateSpecRequest( - parent="parent_value", - spec_id="spec_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateSpecRequest( - parent="parent_value", - spec_id="spec_id_value", - ) - - -def test_create_spec_use_cached_wrapped_rpc(): +def test_create_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -5379,6 +4725,7 @@ def test_create_spec_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.create_spec] = mock_rpc + request = {} client.create_spec(request) @@ -5392,271 +4739,235 @@ def test_create_spec_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_spec_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - ) - response = await client.create_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateSpecRequest() - - -@pytest.mark.asyncio -async def test_create_spec_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_create_spec_rest_required_fields( + request_type=apihub_service.CreateSpecRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.create_spec - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_spec - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.create_spec(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying 
gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.create_spec(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_spec._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("spec_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_create_spec_async( - transport: str = "grpc_asyncio", request_type=apihub_service.CreateSpecRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - ) - response = await client.create_spec(request) + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateSpecRequest() - assert args[0] == request + response_value = Response() + response_value.status_code = 200 - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_create_spec_async_from_dict(): - await test_create_spec_async(request_type=dict) - - -def test_create_spec_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.CreateSpecRequest() + response = client.create_spec(request) - request.parent = "parent_value" + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - call.return_value = common_fields.Spec() - client.create_spec(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_create_spec_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_spec._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("specId",)) + & set( + ( + "parent", + "spec", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_spec_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_spec_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_spec" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_spec" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
apihub_service.CreateSpecRequest.pb( + apihub_service.CreateSpecRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateSpecRequest() - - request.parent = "parent_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Spec()) - await client.create_spec(request) + request = apihub_service.CreateSpecRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Spec() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.create_spec( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -def test_create_spec_flattened(): +def test_create_spec_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateSpecRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Spec() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_spec( - parent="parent_value", - spec=common_fields.Spec(name="name_value"), - spec_id="spec_id_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].spec - mock_val = common_fields.Spec(name="name_value") - assert arg == mock_val - arg = args[0].spec_id - mock_val = "spec_id_value" - assert arg == mock_val - - -def test_create_spec_flattened_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_spec( - apihub_service.CreateSpecRequest(), - parent="parent_value", - spec=common_fields.Spec(name="name_value"), - spec_id="spec_id_value", - ) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_spec(request) -@pytest.mark.asyncio -async def test_create_spec_flattened_async(): - client = ApiHubAsyncClient( +def test_create_spec_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Spec() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Spec()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_spec( + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", spec=common_fields.Spec(name="name_value"), spec_id="spec_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_spec(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].spec - mock_val = common_fields.Spec(name="name_value") - assert arg == mock_val - arg = args[0].spec_id - mock_val = "spec_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_spec_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/specs" + % client.transport._host, + args[1], + ) + + +def test_create_spec_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_spec( + client.create_spec( apihub_service.CreateSpecRequest(), parent="parent_value", spec=common_fields.Spec(name="name_value"), @@ -5664,6 +4975,12 @@ async def test_create_spec_flattened_error_async(): ) +def test_create_spec_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -5671,32 +4988,38 @@ async def test_create_spec_flattened_error_async(): dict, ], ) -def test_get_spec(request_type, transport: str = "grpc"): +def test_get_spec_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Spec( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec( name="name_value", display_name="display_name_value", source_uri="source_uri_value", parsing_mode=common_fields.Spec.ParsingMode.RELAXED, ) - response = client.get_spec(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetSpecRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_spec(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Spec) @@ -5706,60 +5029,13 @@ def test_get_spec(request_type, transport: str = "grpc"): assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED -def test_get_spec_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetSpecRequest() - - -def test_get_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.GetSpecRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetSpecRequest( - name="name_value", - ) - - -def test_get_spec_use_cached_wrapped_rpc(): +def test_get_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -5775,6 +5051,7 @@ def test_get_spec_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_spec] = mock_rpc + request = {} client.get_spec(request) @@ -5788,256 +5065,229 @@ def test_get_spec_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_spec_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - ) - response = await client.get_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetSpecRequest() - +def test_get_spec_rest_required_fields(request_type=apihub_service.GetSpecRequest): + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_get_spec_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # verify fields with default values are dropped - # Ensure method has been cached - assert ( - client._client._transport.get_spec - in client._client._transport._wrapped_methods - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_spec - ] = mock_rpc + # verify required fields with default values are now present - request = {} - await client.get_spec(request) + 
jsonified_request["name"] = "name_value" - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.get_spec(request) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_get_spec_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetSpecRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - ) - response = await client.get_spec(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetSpecRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_spec(request) -@pytest.mark.asyncio -async def test_get_spec_async_from_dict(): - await test_get_spec_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_spec_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_spec_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.GetSpecRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - call.return_value = common_fields.Spec() - client.get_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_spec._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_spec_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_spec_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_spec" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_spec" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetSpecRequest.pb(apihub_service.GetSpecRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.GetSpecRequest() - - request.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Spec()) - await client.get_spec(request) + request = apihub_service.GetSpecRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Spec() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.get_spec( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -def test_get_spec_flattened(): +def test_get_spec_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetSpecRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Spec() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_spec( - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_spec(request) -def test_get_spec_flattened_error(): +def test_get_spec_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_spec( - apihub_service.GetSpecRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_get_spec_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Spec() + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Spec()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_spec( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_spec(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_spec_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_spec_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_spec( + client.get_spec( apihub_service.GetSpecRequest(), name="name_value", ) +def test_get_spec_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -6045,32 +5295,36 @@ async def test_get_spec_flattened_error_async(): dict, ], ) -def test_get_spec_contents(request_type, transport: str = "grpc"): +def test_get_spec_contents_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.SpecContents( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.SpecContents( contents=b"contents_blob", mime_type="mime_type_value", ) - response = client.get_spec_contents(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetSpecContentsRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.SpecContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_spec_contents(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.SpecContents) @@ -6078,64 +5332,13 @@ def test_get_spec_contents(request_type, transport: str = "grpc"): assert response.mime_type == "mime_type_value" -def test_get_spec_contents_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_spec_contents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetSpecContentsRequest() - - -def test_get_spec_contents_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetSpecContentsRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_spec_contents(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetSpecContentsRequest( - name="name_value", - ) - - -def test_get_spec_contents_use_cached_wrapped_rpc(): +def test_get_spec_contents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -6153,6 +5356,7 @@ def test_get_spec_contents_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_spec_contents ] = mock_rpc + request = {} client.get_spec_contents(request) @@ -6166,268 +5370,235 @@ def test_get_spec_contents_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_spec_contents_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_get_spec_contents_rest_required_fields( + request_type=apihub_service.GetSpecContentsRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.SpecContents( - contents=b"contents_blob", - mime_type="mime_type_value", - ) - ) - response = await client.get_spec_contents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetSpecContentsRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_spec_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_get_spec_contents_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_spec_contents - in client._client._transport._wrapped_methods - ) - - # 
Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_spec_contents - ] = mock_rpc - - request = {} - await client.get_spec_contents(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_spec_contents(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_spec_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_spec_contents_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetSpecContentsRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.SpecContents() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.SpecContents( - contents=b"contents_blob", - mime_type="mime_type_value", - ) - ) - response = await client.get_spec_contents(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetSpecContentsRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.SpecContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.SpecContents) - assert response.contents == b"contents_blob" - assert response.mime_type == "mime_type_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_spec_contents(request) -@pytest.mark.asyncio -async def test_get_spec_contents_async_from_dict(): - await test_get_spec_contents_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_spec_contents_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_spec_contents_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetSpecContentsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - call.return_value = common_fields.SpecContents() - client.get_spec_contents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_spec_contents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_spec_contents_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_spec_contents_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetSpecContentsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.SpecContents() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_spec_contents" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_spec_contents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetSpecContentsRequest.pb( + apihub_service.GetSpecContentsRequest() ) - await client.get_spec_contents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.SpecContents.to_json( + common_fields.SpecContents() + ) -def test_get_spec_contents_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.GetSpecContentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.SpecContents() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.SpecContents() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_spec_contents( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_spec_contents_flattened_error(): +def test_get_spec_contents_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetSpecContentsRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_spec_contents( - apihub_service.GetSpecContentsRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_spec_contents(request) -@pytest.mark.asyncio -async def test_get_spec_contents_flattened_async(): - client = ApiHubAsyncClient( +def test_get_spec_contents_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.SpecContents() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.SpecContents() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.SpecContents() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_spec_contents( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.SpecContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_spec_contents(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}:contents" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_spec_contents_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_spec_contents_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_spec_contents( + client.get_spec_contents( apihub_service.GetSpecContentsRequest(), name="name_value", ) +def test_get_spec_contents_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -6435,93 +5606,48 @@ async def test_get_spec_contents_flattened_error_async(): dict, ], ) -def test_list_specs(request_type, transport: str = "grpc"): +def test_list_specs_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListSpecsResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListSpecsResponse( next_page_token="next_page_token_value", ) - response = client.list_specs(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListSpecsRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListSpecsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_specs(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSpecsPager) assert response.next_page_token == "next_page_token_value" -def test_list_specs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_specs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListSpecsRequest() - - -def test_list_specs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.ListSpecsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_specs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListSpecsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_specs_use_cached_wrapped_rpc(): +def test_list_specs_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -6537,6 +5663,7 @@ def test_list_specs_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.list_specs] = mock_rpc + request = {} client.list_specs(request) @@ -6550,196 +5677,233 @@ def test_list_specs_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_specs_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_list_specs_rest_required_fields(request_type=apihub_service.ListSpecsRequest): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListSpecsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_specs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListSpecsRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_specs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_list_specs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" - # Ensure method has been cached - assert ( - client._client._transport.list_specs - in 
client._client._transport._wrapped_methods + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_specs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", ) + ) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_specs - ] = mock_rpc - - request = {} - await client.list_specs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" - await client.list_specs(request) + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListSpecsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -@pytest.mark.asyncio -async def test_list_specs_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListSpecsRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Convert return value to protobuf type + return_value = apihub_service.ListSpecsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListSpecsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_specs(request) + response = client.list_specs(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListSpecsRequest() - assert args[0] == request + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSpecsAsyncPager) - assert response.next_page_token == "next_page_token_value" +def test_list_specs_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) -@pytest.mark.asyncio -async def test_list_specs_async_from_dict(): - await test_list_specs_async(request_type=dict) + unset_fields = transport.list_specs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -def test_list_specs_field_headers(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_specs_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_specs" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_specs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListSpecsRequest.pb( + apihub_service.ListSpecsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.ListSpecsRequest() - - request.parent = "parent_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListSpecsResponse.to_json( + apihub_service.ListSpecsResponse() + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - call.return_value = apihub_service.ListSpecsResponse() - client.list_specs(request) + request = apihub_service.ListSpecsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListSpecsResponse() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.list_specs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -@pytest.mark.asyncio -async def test_list_specs_field_headers_async(): - client = ApiHubAsyncClient( +def test_list_specs_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListSpecsRequest +): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListSpecsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListSpecsResponse() - ) - await client.list_specs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_specs(request) -def test_list_specs_flattened(): +def test_list_specs_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListSpecsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_specs( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = apihub_service.ListSpecsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListSpecsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_specs(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/specs" + % client.transport._host, + args[1], + ) -def test_list_specs_flattened_error(): +def test_list_specs_rest_flattened_error(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -6751,60 +5915,18 @@ def test_list_specs_flattened_error(): ) -@pytest.mark.asyncio -async def test_list_specs_flattened_async(): - client = ApiHubAsyncClient( +def test_list_specs_rest_pager(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListSpecsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListSpecsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_specs( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_specs_flattened_error_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_specs( - apihub_service.ListSpecsRequest(), - parent="parent_value", - ) - - -def test_list_specs_pager(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListSpecsResponse( specs=[ common_fields.Spec(), @@ -6829,162 +5951,29 @@ def test_list_specs_pager(transport_name: str = "grpc"): common_fields.Spec(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_specs(request={}, retry=retry, timeout=timeout) + # Wrap the values into proper Response objs + response = tuple(apihub_service.ListSpecsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + sample_request = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + + pager = client.list_specs(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.Spec) for i in results) - -def test_list_specs_pages(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - common_fields.Spec(), - ], - next_page_token="abc", - ), - apihub_service.ListSpecsResponse( - specs=[], - next_page_token="def", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - ], - next_page_token="ghi", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - ], - ), - RuntimeError, - ) - pages = list(client.list_specs(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_specs_async_pager(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_specs), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - common_fields.Spec(), - ], - next_page_token="abc", - ), - apihub_service.ListSpecsResponse( - specs=[], - next_page_token="def", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - ], - next_page_token="ghi", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_specs( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.Spec) for i in responses) - - -@pytest.mark.asyncio -async def test_list_specs_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_specs), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - common_fields.Spec(), - ], - next_page_token="abc", - ), - apihub_service.ListSpecsResponse( - specs=[], - next_page_token="def", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - ], - next_page_token="ghi", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_specs(request={}) - ).pages: - pages.append(page_) + pages = list(client.list_specs(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -6996,91 +5985,173 @@ async def test_list_specs_async_pages(): dict, ], ) -def test_update_spec(request_type, transport: str = "grpc"): +def test_update_spec_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - response = client.update_spec(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateSpecRequest() - assert args[0] == request + # send a request that will satisfy transcoding + request_init = { + "spec": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + } + request_init["spec"] = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5", + "display_name": "display_name_value", + "spec_type": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, + "details": { + "open_api_spec_details": { + "format_": 1, + "version": "version_value", + "owner": {"display_name": "display_name_value", "email": "email_value"}, + }, + "description": "description_value", + }, + "source_uri": "source_uri_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "lint_response": { + "issues": [ + { + "code": "code_value", + "path": ["path_value1", "path_value2"], + "message": "message_value", + "severity": 1, + "range_": {"start": {"line": 424, "character": 941}, "end": {}}, + } + ], + "summary": [{"severity": 1, "count": 553}], + "state": 1, + "source": "source_value", + "linter": 1, + "create_time": {}, + }, + "attributes": {}, + "documentation": {"external_uri": "external_uri_value"}, + "parsing_mode": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateSpecRequest.meta.fields["spec"] + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] -def test_update_spec_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.update_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateSpecRequest() + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -def test_update_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + subfields_not_in_runtime = [] - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateSpecRequest() + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["spec"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.update_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateSpecRequest() + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["spec"][field])): + del request_init["spec"][field][i][subfield] + else: + del request_init["spec"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec( + name="name_value", + display_name="display_name_value", + source_uri="source_uri_value", + parsing_mode=common_fields.Spec.ParsingMode.RELAXED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_spec(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common_fields.Spec) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_uri == "source_uri_value" + assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED -def test_update_spec_use_cached_wrapped_rpc(): +def test_update_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -7096,6 +6167,7 @@ def test_update_spec_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.update_spec] = mock_rpc + request = {} client.update_spec(request) @@ -7109,268 +6181,245 @@ def test_update_spec_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_spec_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - ) - response = await client.update_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateSpecRequest() - - -@pytest.mark.asyncio -async def test_update_spec_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_update_spec_rest_required_fields( + request_type=apihub_service.UpdateSpecRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_spec - in client._client._transport._wrapped_methods - ) + transport_class = transports.ApiHubRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_spec - ] = mock_rpc + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.update_spec(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.update_spec(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_spec._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -@pytest.mark.asyncio -async def test_update_spec_async( - transport: str = "grpc_asyncio", request_type=apihub_service.UpdateSpecRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - ) - response = await client.update_spec(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateSpecRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_spec(request) -@pytest.mark.asyncio -async def test_update_spec_async_from_dict(): - await test_update_spec_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_update_spec_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_spec_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateSpecRequest() - - request.spec.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - call.return_value = common_fields.Spec() - client.update_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "spec.name=name_value", - ) in kw["metadata"] + unset_fields = transport.update_spec._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "spec", + "updateMask", + ) + ) + ) -@pytest.mark.asyncio -async def test_update_spec_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_spec_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_spec" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_spec" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateSpecRequest.pb( + apihub_service.UpdateSpecRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateSpecRequest() - - request.spec.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Spec()) - await client.update_spec(request) + request = apihub_service.UpdateSpecRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Spec() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.update_spec( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "spec.name=name_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -def test_update_spec_flattened(): +def test_update_spec_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateSpecRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Spec() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_spec( - spec=common_fields.Spec(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # send a request that will satisfy transcoding + request_init = { + "spec": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + } + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].spec - mock_val = common_fields.Spec(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_spec(request) -def test_update_spec_flattened_error(): +def test_update_spec_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_spec( - apihub_service.UpdateSpecRequest(), - spec=common_fields.Spec(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -@pytest.mark.asyncio -async def test_update_spec_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Spec() + # get arguments that satisfy an http rule for this method + sample_request = { + "spec": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Spec()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_spec( + # get truthy value for each flattened field + mock_args = dict( spec=common_fields.Spec(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_spec(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].spec - mock_val = common_fields.Spec(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_spec_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{spec.name=projects/*/locations/*/apis/*/versions/*/specs/*}" + % client.transport._host, + args[1], + ) + + +def test_update_spec_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_spec( + client.update_spec( apihub_service.UpdateSpecRequest(), spec=common_fields.Spec(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) +def test_update_spec_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -7378,86 +6427,43 @@ async def test_update_spec_flattened_error_async(): dict, ], ) -def test_delete_spec(request_type, transport: str = "grpc"): +def test_delete_spec_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_spec(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteSpecRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_spec(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_spec_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteSpecRequest() - - -def test_delete_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteSpecRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteSpecRequest( - name="name_value", - ) - - -def test_delete_spec_use_cached_wrapped_rpc(): +def test_delete_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -7473,6 +6479,7 @@ def test_delete_spec_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[client._transport.delete_spec] = mock_rpc + request = {} client.delete_spec(request) @@ -7486,240 +6493,222 @@ def test_delete_spec_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_spec_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteSpecRequest() - - -@pytest.mark.asyncio -async def test_delete_spec_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_delete_spec_rest_required_fields( + request_type=apihub_service.DeleteSpecRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_spec - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_spec - ] = mock_rpc - - request = {} - 
await client.delete_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_delete_spec_async( - transport: str = "grpc_asyncio", request_type=apihub_service.DeleteSpecRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_spec(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteSpecRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert response is None + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_delete_spec_async_from_dict(): - await test_delete_spec_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_delete_spec_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteSpecRequest() + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - request.name = "name_value" + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - call.return_value = None - client.delete_spec(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_spec(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_delete_spec_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_spec_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteSpecRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.delete_spec._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_spec_flattened(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_spec_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_spec" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteSpecRequest.pb( + apihub_service.DeleteSpecRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = apihub_service.DeleteSpecRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.delete_spec( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() -def test_delete_spec_flattened_error(): +def test_delete_spec_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteSpecRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_spec( - apihub_service.DeleteSpecRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_spec_flattened_async(): - client = ApiHubAsyncClient( + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_spec(request) + + +def test_delete_spec_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_spec( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_spec(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_spec_flattened_error_async(): - client = ApiHubAsyncClient( +def test_delete_spec_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_spec( + client.delete_spec( apihub_service.DeleteSpecRequest(), name="name_value", ) +def test_delete_spec_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -7727,32 +6716,36 @@ async def test_delete_spec_flattened_error_async(): dict, ], ) -def test_get_api_operation(request_type, transport: str = "grpc"): +def test_get_api_operation_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ApiOperation( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ApiOperation( name="name_value", spec="spec_value", ) - response = client.get_api_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetApiOperationRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ApiOperation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api_operation(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.ApiOperation) @@ -7760,64 +6753,13 @@ def test_get_api_operation(request_type, transport: str = "grpc"): assert response.spec == "spec_value" -def test_get_api_operation_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_api_operation() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetApiOperationRequest() - - -def test_get_api_operation_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetApiOperationRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_api_operation(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetApiOperationRequest( - name="name_value", - ) - - -def test_get_api_operation_use_cached_wrapped_rpc(): +def test_get_api_operation_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -7835,6 +6777,7 @@ def test_get_api_operation_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_api_operation ] = mock_rpc + request = {} client.get_api_operation(request) @@ -7848,268 +6791,235 @@ def test_get_api_operation_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_api_operation_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiOperation( - name="name_value", - spec="spec_value", - ) - ) - response = await client.get_api_operation() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetApiOperationRequest() - - -@pytest.mark.asyncio -async def test_get_api_operation_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_api_operation_rest_required_fields( + request_type=apihub_service.GetApiOperationRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_api_operation - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_api_operation - ] = mock_rpc + # verify fields with default values are 
dropped - request = {} - await client.get_api_operation(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api_operation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_api_operation(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api_operation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_api_operation_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetApiOperationRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.ApiOperation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiOperation( - name="name_value", - spec="spec_value", - ) - ) - response = await client.get_api_operation(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetApiOperationRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.ApiOperation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.ApiOperation) - assert response.name == "name_value" - assert response.spec == "spec_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api_operation(request) -@pytest.mark.asyncio -async def test_get_api_operation_async_from_dict(): - await test_get_api_operation_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_api_operation_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_api_operation_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetApiOperationRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - call.return_value = common_fields.ApiOperation() - client.get_api_operation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_api_operation._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_api_operation_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_api_operation_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetApiOperationRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiOperation() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_api_operation" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_api_operation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetApiOperationRequest.pb( + apihub_service.GetApiOperationRequest() ) - await client.get_api_operation(request) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ApiOperation.to_json( + common_fields.ApiOperation() + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_api_operation_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.GetApiOperationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ApiOperation() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ApiOperation() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_api_operation( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_api_operation_flattened_error(): +def test_get_api_operation_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetApiOperationRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_api_operation( - apihub_service.GetApiOperationRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_api_operation(request) -@pytest.mark.asyncio -async def test_get_api_operation_flattened_async(): - client = ApiHubAsyncClient( +def test_get_api_operation_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ApiOperation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ApiOperation() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiOperation() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_api_operation( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ApiOperation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_api_operation(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/operations/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_api_operation_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_api_operation_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_api_operation( + client.get_api_operation( apihub_service.GetApiOperationRequest(), name="name_value", ) +def test_get_api_operation_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -8117,99 +7027,48 @@ async def test_get_api_operation_flattened_error_async(): dict, ], ) -def test_list_api_operations(request_type, transport: str = "grpc"): +def test_list_api_operations_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListApiOperationsResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApiOperationsResponse( next_page_token="next_page_token_value", ) - response = client.list_api_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListApiOperationsRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListApiOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_api_operations(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListApiOperationsPager) assert response.next_page_token == "next_page_token_value" -def test_list_api_operations_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_api_operations() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListApiOperationsRequest() - - -def test_list_api_operations_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.ListApiOperationsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_api_operations(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListApiOperationsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_api_operations_use_cached_wrapped_rpc(): +def test_list_api_operations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -8229,6 +7088,7 @@ def test_list_api_operations_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.list_api_operations ] = mock_rpc + request = {} client.list_api_operations(request) @@ -8242,278 +7102,258 @@ def test_list_api_operations_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_api_operations_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApiOperationsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_api_operations() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListApiOperationsRequest() - - -@pytest.mark.asyncio -async def test_list_api_operations_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_list_api_operations_rest_required_fields( + request_type=apihub_service.ListApiOperationsRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.list_api_operations - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_api_operations - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.list_api_operations(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).list_api_operations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.list_api_operations(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_api_operations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_api_operations_async( - transport: str = "grpc_asyncio", - request_type=apihub_service.ListApiOperationsRequest, -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApiOperationsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_api_operations(request) + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApiOperationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListApiOperationsRequest() - assert args[0] == request + response_value = Response() + response_value.status_code = 200 - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListApiOperationsAsyncPager) - assert response.next_page_token == "next_page_token_value" + # Convert return value to protobuf type + return_value = apihub_service.ListApiOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_list_api_operations_async_from_dict(): - await test_list_api_operations_async(request_type=dict) + response = client.list_api_operations(request) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_api_operations_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListApiOperationsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - call.return_value = apihub_service.ListApiOperationsResponse() - client.list_api_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_list_api_operations_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.list_api_operations._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_api_operations_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_api_operations_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListApiOperationsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApiOperationsResponse() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_api_operations" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_api_operations" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListApiOperationsRequest.pb( + apihub_service.ListApiOperationsRequest() ) - await client.list_api_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListApiOperationsResponse.to_json( + apihub_service.ListApiOperationsResponse() + ) -def test_list_api_operations_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.ListApiOperationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListApiOperationsResponse() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListApiOperationsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.list_api_operations( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_api_operations_flattened_error(): +def test_list_api_operations_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListApiOperationsRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_api_operations( - apihub_service.ListApiOperationsRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_api_operations(request) -@pytest.mark.asyncio -async def test_list_api_operations_flattened_async(): - client = ApiHubAsyncClient( + +def test_list_api_operations_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListApiOperationsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApiOperationsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApiOperationsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_api_operations( + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListApiOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_api_operations(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/operations" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_api_operations_flattened_error_async(): - client = ApiHubAsyncClient( +def test_list_api_operations_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_api_operations( + client.list_api_operations( apihub_service.ListApiOperationsRequest(), parent="parent_value", ) -def test_list_api_operations_pager(transport_name: str = "grpc"): +def test_list_api_operations_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListApiOperationsResponse( api_operations=[ common_fields.ApiOperation(), @@ -8538,205 +7378,74 @@ def test_list_api_operations_pager(transport_name: str = "grpc"): common_fields.ApiOperation(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListApiOperationsResponse.to_json(x) for x in response ) - pager = client.list_api_operations(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.list_api_operations(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.ApiOperation) for i in results) - -def test_list_api_operations_pages(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - next_page_token="abc", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[], - next_page_token="def", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - ], - next_page_token="ghi", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_api_operations(request={}).pages) + pages = list(client.list_api_operations(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.asyncio -async def test_list_api_operations_async_pager(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.GetDefinitionRequest, + dict, + ], +) +def test_get_definition_rest(request_type): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - next_page_token="abc", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[], - next_page_token="def", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - ], - next_page_token="ghi", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_api_operations( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.ApiOperation) for i in responses) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Definition( + name="name_value", + spec="spec_value", + type_=common_fields.Definition.Type.SCHEMA, + ) -@pytest.mark.asyncio -async def test_list_api_operations_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - next_page_token="abc", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[], - next_page_token="def", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - ], - next_page_token="ghi", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_api_operations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetDefinitionRequest, - dict, - ], -) -def test_get_definition(request_type, transport: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Definition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Definition( - name="name_value", - spec="spec_value", - type_=common_fields.Definition.Type.SCHEMA, - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value response = client.get_definition(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetDefinitionRequest() - assert args[0] == request - # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Definition) assert response.name == "name_value" @@ -8744,60 +7453,13 @@ def test_get_definition(request_type, transport: str = "grpc"): assert response.type_ == common_fields.Definition.Type.SCHEMA -def test_get_definition_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_definition() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDefinitionRequest() - - -def test_get_definition_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.GetDefinitionRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_definition(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDefinitionRequest( - name="name_value", - ) - - -def test_get_definition_use_cached_wrapped_rpc(): +def test_get_definition_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -8813,6 +7475,7 @@ def test_get_definition_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_definition] = mock_rpc + request = {} client.get_definition(request) @@ -8826,259 +7489,235 @@ def test_get_definition_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_definition_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Definition( - name="name_value", - spec="spec_value", - type_=common_fields.Definition.Type.SCHEMA, - ) - ) - response = await client.get_definition() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDefinitionRequest() - - -@pytest.mark.asyncio -async def test_get_definition_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_definition_rest_required_fields( + request_type=apihub_service.GetDefinitionRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_definition - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_definition - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_definition(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_definition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_definition(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_definition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_definition_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetDefinitionRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Definition() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Definition( - name="name_value", - spec="spec_value", - type_=common_fields.Definition.Type.SCHEMA, - ) - ) - response = await client.get_definition(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetDefinitionRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Definition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Definition) - assert response.name == "name_value" - assert response.spec == "spec_value" - assert response.type_ == common_fields.Definition.Type.SCHEMA + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_definition(request) -@pytest.mark.asyncio -async def test_get_definition_async_from_dict(): - await test_get_definition_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_definition_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_definition_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetDefinitionRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - call.return_value = common_fields.Definition() - client.get_definition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_definition._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_definition_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_definition_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_definition" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_definition" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetDefinitionRequest.pb( + apihub_service.GetDefinitionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetDefinitionRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Definition.to_json( common_fields.Definition() ) - await client.get_definition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_definition_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.GetDefinitionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Definition() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Definition() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_definition( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_definition_flattened_error(): +def test_get_definition_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetDefinitionRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_definition( - apihub_service.GetDefinitionRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_definition(request) -@pytest.mark.asyncio -async def test_get_definition_flattened_async(): - client = ApiHubAsyncClient( +def test_get_definition_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Definition() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Definition() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Definition() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_definition( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Definition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_definition(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/definitions/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_definition_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_definition_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_definition( + client.get_definition( apihub_service.GetDefinitionRequest(), name="name_value", ) +def test_get_definition_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -9086,22 +7725,116 @@ async def test_get_definition_flattened_error_async(): dict, ], ) -def test_create_deployment(request_type, transport: str = "grpc"): +def test_create_deployment_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["deployment"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "deployment_type": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "resource_uri": "resource_uri_value", + "endpoints": ["endpoints_value1", "endpoints_value2"], + "api_versions": ["api_versions_value1", "api_versions_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "slo": {}, + "environment": {}, + "attributes": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment( + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateDeploymentRequest.meta.fields["deployment"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deployment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if 
(field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deployment"][field])): + del request_init["deployment"][field][i][subfield] + else: + del request_init["deployment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment( name="name_value", display_name="display_name_value", description="description_value", @@ -9109,13 +7842,17 @@ def test_create_deployment(request_type, transport: str = "grpc"): endpoints=["endpoints_value"], api_versions=["api_versions_value"], ) - response = client.create_deployment(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateDeploymentRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_deployment(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Deployment) @@ -9127,66 +7864,13 @@ def test_create_deployment(request_type, transport: str = "grpc"): assert response.api_versions == ["api_versions_value"] -def test_create_deployment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateDeploymentRequest() - - -def test_create_deployment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateDeploymentRequest( - parent="parent_value", - deployment_id="deployment_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_deployment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateDeploymentRequest( - parent="parent_value", - deployment_id="deployment_id_value", - ) - - -def test_create_deployment_use_cached_wrapped_rpc(): +def test_create_deployment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -9204,6 +7888,7 @@ def test_create_deployment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_deployment ] = mock_rpc + request = {} client.create_deployment(request) @@ -9217,293 +7902,233 @@ def test_create_deployment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_deployment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_create_deployment_rest_required_fields( + request_type=apihub_service.CreateDeploymentRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - ) - response = await client.create_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateDeploymentRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_create_deployment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert 
wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" - # Ensure method has been cached - assert ( - client._client._transport.create_deployment - in client._client._transport._wrapped_methods - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("deployment_id",)) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_deployment - ] = mock_rpc + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" - request = {} - await client.create_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_deployment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_create_deployment_async( - transport: str = "grpc_asyncio", request_type=apihub_service.CreateDeploymentRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - ) - response = await client.create_deployment(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateDeploymentRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_deployment(request) -@pytest.mark.asyncio -async def test_create_deployment_async_from_dict(): - await test_create_deployment_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_deployment_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_deployment_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateDeploymentRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - call.return_value = common_fields.Deployment() - client.create_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("deploymentId",)) + & set( + ( + "parent", + "deployment", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_deployment_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_deployment_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateDeploymentRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_deployment" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateDeploymentRequest.pb( + apihub_service.CreateDeploymentRequest() ) - await client.create_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Deployment.to_json( + common_fields.Deployment() + ) -def test_create_deployment_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.CreateDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Deployment() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.create_deployment( - parent="parent_value", - deployment=common_fields.Deployment(name="name_value"), - deployment_id="deployment_id_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].deployment - mock_val = common_fields.Deployment(name="name_value") - assert arg == mock_val - arg = args[0].deployment_id - mock_val = "deployment_id_value" - assert arg == mock_val - - -def test_create_deployment_flattened_error(): + pre.assert_called_once() + post.assert_called_once() + + +def test_create_deployment_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateDeploymentRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_deployment( - apihub_service.CreateDeploymentRequest(), - parent="parent_value", - deployment=common_fields.Deployment(name="name_value"), - deployment_id="deployment_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_deployment(request) -@pytest.mark.asyncio -async def test_create_deployment_flattened_async(): - client = ApiHubAsyncClient( +def test_create_deployment_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_deployment( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", deployment=common_fields.Deployment(name="name_value"), deployment_id="deployment_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].deployment - mock_val = common_fields.Deployment(name="name_value") - assert arg == mock_val - arg = args[0].deployment_id - mock_val = "deployment_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_deployment_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deployments" + % client.transport._host, + args[1], + ) + + +def test_create_deployment_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_deployment( + client.create_deployment( apihub_service.CreateDeploymentRequest(), parent="parent_value", deployment=common_fields.Deployment(name="name_value"), @@ -9511,6 +8136,12 @@ async def test_create_deployment_flattened_error_async(): ) +def test_create_deployment_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -9518,20 +8149,20 @@ async def test_create_deployment_flattened_error_async(): dict, ], ) -def test_get_deployment(request_type, transport: str = "grpc"): +def test_get_deployment_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment( name="name_value", display_name="display_name_value", description="description_value", @@ -9539,13 +8170,17 @@ def test_get_deployment(request_type, transport: str = "grpc"): endpoints=["endpoints_value"], api_versions=["api_versions_value"], ) - response = client.get_deployment(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetDeploymentRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_deployment(request) # Establish that the response is the type that we expect. 
assert isinstance(response, common_fields.Deployment) @@ -9557,65 +8192,18 @@ def test_get_deployment(request_type, transport: str = "grpc"): assert response.api_versions == ["api_versions_value"] -def test_get_deployment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. +def test_get_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - client.get_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDeploymentRequest() - -def test_get_deployment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetDeploymentRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_deployment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDeploymentRequest( - name="name_value", - ) - - -def test_get_deployment_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached assert client._transport.get_deployment in client._transport._wrapped_methods @@ -9626,6 +8214,7 @@ def test_get_deployment_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_deployment] = mock_rpc + request = {} client.get_deployment(request) @@ -9639,362 +8228,280 @@ def test_get_deployment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_deployment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - ) - response = await client.get_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDeploymentRequest() - - -@pytest.mark.asyncio -async def test_get_deployment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_deployment_rest_required_fields( + request_type=apihub_service.GetDeploymentRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_deployment - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_deployment - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_deployment(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).get_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_deployment(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_deployment_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetDeploymentRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - ) - response = await client.get_deployment(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetDeploymentRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_deployment(request) -@pytest.mark.asyncio -async def test_get_deployment_async_from_dict(): - await test_get_deployment_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_deployment_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_deployment_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetDeploymentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - call.return_value = common_fields.Deployment() - client.get_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_deployment_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_deployment_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_deployment" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetDeploymentRequest.pb( + apihub_service.GetDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetDeploymentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Deployment.to_json( common_fields.Deployment() ) - await client.get_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - -def test_get_deployment_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.GetDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Deployment() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_deployment( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_deployment_flattened_error(): +def test_get_deployment_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetDeploymentRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_deployment( - apihub_service.GetDeploymentRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_deployment(request) -@pytest.mark.asyncio -async def test_get_deployment_flattened_async(): - client = ApiHubAsyncClient( +def test_get_deployment_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Deployment() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_deployment( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_deployment_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_deployment_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_deployment( + client.get_deployment( apihub_service.GetDeploymentRequest(), name="name_value", ) -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListDeploymentsRequest, +def test_get_deployment_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.ListDeploymentsRequest, dict, ], ) -def test_list_deployments(request_type, transport: str = "grpc"): +def test_list_deployments_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListDeploymentsResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDeploymentsResponse( next_page_token="next_page_token_value", ) - response = client.list_deployments(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListDeploymentsRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_deployments(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListDeploymentsPager) assert response.next_page_token == "next_page_token_value" -def test_list_deployments_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_deployments() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListDeploymentsRequest() - - -def test_list_deployments_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.ListDeploymentsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_deployments(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListDeploymentsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_deployments_use_cached_wrapped_rpc(): +def test_list_deployments_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -10012,6 +8519,7 @@ def test_list_deployments_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.list_deployments ] = mock_rpc + request = {} client.list_deployments(request) @@ -10025,263 +8533,254 @@ def test_list_deployments_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_deployments_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDeploymentsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_deployments() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListDeploymentsRequest() - - -@pytest.mark.asyncio -async def test_list_deployments_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_list_deployments_rest_required_fields( + request_type=apihub_service.ListDeploymentsRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.list_deployments - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_deployments - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.list_deployments(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.list_deployments(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_deployments_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListDeploymentsRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDeploymentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDeploymentsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_deployments(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListDeploymentsRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = apihub_service.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDeploymentsAsyncPager) - assert response.next_page_token == "next_page_token_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_deployments(request) -@pytest.mark.asyncio -async def test_list_deployments_async_from_dict(): - await test_list_deployments_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_deployments_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_deployments_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListDeploymentsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - call.return_value = apihub_service.ListDeploymentsResponse() - client.list_deployments(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.list_deployments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_deployments_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_deployments_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_deployments" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_deployments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListDeploymentsRequest.pb( + apihub_service.ListDeploymentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListDeploymentsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListDeploymentsResponse.to_json( apihub_service.ListDeploymentsResponse() ) - await client.list_deployments(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = apihub_service.ListDeploymentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListDeploymentsResponse() - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + client.list_deployments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + pre.assert_called_once() + post.assert_called_once() -def test_list_deployments_flattened(): + +def test_list_deployments_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListDeploymentsRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListDeploymentsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_deployments( - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_deployments(request) -def test_list_deployments_flattened_error(): +def test_list_deployments_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_deployments( - apihub_service.ListDeploymentsRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_deployments_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDeploymentsResponse() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = apihub_service.ListDeploymentsResponse() + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDeploymentsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_deployments( + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_deployments(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deployments" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_deployments_flattened_error_async(): - client = ApiHubAsyncClient( +def test_list_deployments_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_deployments( + client.list_deployments( apihub_service.ListDeploymentsRequest(), parent="parent_value", ) -def test_list_deployments_pager(transport_name: str = "grpc"): +def test_list_deployments_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListDeploymentsResponse( deployments=[ common_fields.Deployment(), @@ -10306,162 +8805,29 @@ def test_list_deployments_pager(transport_name: str = "grpc"): common_fields.Deployment(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListDeploymentsResponse.to_json(x) for x in response ) - pager = client.list_deployments(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert pager._metadata == expected_metadata - assert 
pager._retry == retry - assert pager._timeout == timeout + pager = client.list_deployments(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.Deployment) for i in results) - -def test_list_deployments_pages(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - common_fields.Deployment(), - ], - next_page_token="abc", - ), - apihub_service.ListDeploymentsResponse( - deployments=[], - next_page_token="def", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - ], - next_page_token="ghi", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - ], - ), - RuntimeError, - ) - pages = list(client.list_deployments(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_deployments_async_pager(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deployments), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - common_fields.Deployment(), - ], - next_page_token="abc", - ), - apihub_service.ListDeploymentsResponse( - deployments=[], - next_page_token="def", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - ], - next_page_token="ghi", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_deployments( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.Deployment) for i in responses) - - -@pytest.mark.asyncio -async def test_list_deployments_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deployments), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - common_fields.Deployment(), - ], - next_page_token="abc", - ), - apihub_service.ListDeploymentsResponse( - deployments=[], - next_page_token="def", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - ], - next_page_token="ghi", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_deployments(request={}) - ).pages: - pages.append(page_) + pages = list(client.list_deployments(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -10473,101 +8839,154 @@ async def test_list_deployments_async_pages(): dict, ], ) -def test_update_deployment(request_type, transport: str = "grpc"): +def test_update_deployment_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - response = client.update_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateDeploymentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] + # send a request that will satisfy transcoding + request_init = { + "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} + } + request_init["deployment"] = { + "name": "projects/sample1/locations/sample2/deployments/sample3", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "deployment_type": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "resource_uri": "resource_uri_value", + "endpoints": ["endpoints_value1", "endpoints_value2"], + "api_versions": ["api_versions_value1", "api_versions_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "slo": {}, + "environment": {}, + "attributes": {}, + } + # The version of a generated dependency at test 
runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateDeploymentRequest.meta.fields["deployment"] -def test_update_deployment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateDeploymentRequest() + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields -def test_update_deployment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateDeploymentRequest() + subfields_not_in_runtime = [] - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deployment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = 
subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deployment"][field])): + del request_init["deployment"][field][i][subfield] + else: + del request_init["deployment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment( + name="name_value", + display_name="display_name_value", + description="description_value", + resource_uri="resource_uri_value", + endpoints=["endpoints_value"], + api_versions=["api_versions_value"], ) - client.update_deployment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateDeploymentRequest() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_deployment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common_fields.Deployment) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.resource_uri == "resource_uri_value" + assert response.endpoints == ["endpoints_value"] + assert response.api_versions == ["api_versions_value"] -def test_update_deployment_use_cached_wrapped_rpc(): +def test_update_deployment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -10585,6 +9004,7 @@ def test_update_deployment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.update_deployment ] = mock_rpc + request = {} client.update_deployment(request) @@ -10598,290 +9018,245 @@ def test_update_deployment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_deployment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - ) - response = await client.update_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateDeploymentRequest() - - -@pytest.mark.asyncio -async def test_update_deployment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_update_deployment_rest_required_fields( + request_type=apihub_service.UpdateDeploymentRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_deployment - in client._client._transport._wrapped_methods - ) + transport_class = transports.ApiHubRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_deployment - ] = mock_rpc + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.update_deployment(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.update_deployment(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -@pytest.mark.asyncio -async def test_update_deployment_async( - transport: str = "grpc_asyncio", request_type=apihub_service.UpdateDeploymentRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - ) - response = await client.update_deployment(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateDeploymentRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_deployment(request) -@pytest.mark.asyncio -async def test_update_deployment_async_from_dict(): - await test_update_deployment_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_update_deployment_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_deployment_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateDeploymentRequest() - - request.deployment.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - call.return_value = common_fields.Deployment() - client.update_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "deployment.name=name_value", - ) in kw["metadata"] + unset_fields = transport.update_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "deployment", + "updateMask", + ) + ) + ) -@pytest.mark.asyncio -async def test_update_deployment_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_deployment_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateDeploymentRequest() - - request.deployment.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_deployment" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateDeploymentRequest.pb( + apihub_service.UpdateDeploymentRequest() ) - await client.update_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "deployment.name=name_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Deployment.to_json( + common_fields.Deployment() + ) -def test_update_deployment_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.UpdateDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Deployment() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.update_deployment( - deployment=common_fields.Deployment(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].deployment - mock_val = common_fields.Deployment(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_update_deployment_flattened_error(): +def test_update_deployment_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateDeploymentRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_deployment( - apihub_service.UpdateDeploymentRequest(), - deployment=common_fields.Deployment(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # send a request that will satisfy transcoding + request_init = { + "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_deployment(request) -@pytest.mark.asyncio -async def test_update_deployment_flattened_async(): - client = ApiHubAsyncClient( +def test_update_deployment_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_deployment( + # get arguments that satisfy an http rule for this method + sample_request = { + "deployment": { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( deployment=common_fields.Deployment(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].deployment - mock_val = common_fields.Deployment(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_deployment_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{deployment.name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) + + +def test_update_deployment_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_deployment( + client.update_deployment( apihub_service.UpdateDeploymentRequest(), deployment=common_fields.Deployment(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) +def test_update_deployment_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -10889,92 +9264,41 @@ async def test_update_deployment_flattened_error_async(): dict, ], ) -def test_delete_deployment(request_type, transport: str = "grpc"): +def test_delete_deployment_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_deployment(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteDeploymentRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_deployment(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_deployment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteDeploymentRequest() - - -def test_delete_deployment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteDeploymentRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_deployment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteDeploymentRequest( - name="name_value", - ) - - -def test_delete_deployment_use_cached_wrapped_rpc(): +def test_delete_deployment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -10992,6 +9316,7 @@ def test_delete_deployment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.delete_deployment ] = mock_rpc + request = {} client.delete_deployment(request) @@ -11005,252 +9330,220 @@ def test_delete_deployment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_deployment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteDeploymentRequest() - - -@pytest.mark.asyncio -async def test_delete_deployment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_delete_deployment_rest_required_fields( + request_type=apihub_service.DeleteDeploymentRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_deployment - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_deployment - ] = mock_rpc - - request = {} - await client.delete_deployment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_deployment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_delete_deployment_async( - transport: str = "grpc_asyncio", request_type=apihub_service.DeleteDeploymentRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_deployment(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteDeploymentRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert response is None + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_delete_deployment_async_from_dict(): - await test_delete_deployment_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_delete_deployment_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteDeploymentRequest() + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - request.name = "name_value" + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - call.return_value = None - client.delete_deployment(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_deployment(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_delete_deployment_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_deployment_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteDeploymentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.delete_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_deployment_flattened(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_deployment_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_deployment( - name="name_value", + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_deployment" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteDeploymentRequest.pb( + apihub_service.DeleteDeploymentRequest() ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = apihub_service.DeleteDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() -def test_delete_deployment_flattened_error(): +def test_delete_deployment_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteDeploymentRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_deployment( - apihub_service.DeleteDeploymentRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_deployment(request) -@pytest.mark.asyncio -async def test_delete_deployment_flattened_async(): - client = ApiHubAsyncClient( +def test_delete_deployment_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_deployment( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_deployment_flattened_error_async(): - client = ApiHubAsyncClient( +def test_delete_deployment_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_deployment( + client.delete_deployment( apihub_service.DeleteDeploymentRequest(), name="name_value", ) +def test_delete_deployment_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -11258,20 +9551,107 @@ async def test_delete_deployment_flattened_error_async(): dict, ], ) -def test_create_attribute(request_type, transport: str = "grpc"): +def test_create_attribute_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["attribute"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "definition_type": 1, + "scope": 1, + "data_type": 1, + "allowed_values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ], + "cardinality": 1172, + "mandatory": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateAttributeRequest.meta.fields["attribute"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["attribute"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["attribute"][field])): + del 
request_init["attribute"][field][i][subfield] + else: + del request_init["attribute"][field][subfield] + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute( name="name_value", display_name="display_name_value", description="description_value", @@ -11281,13 +9661,17 @@ def test_create_attribute(request_type, transport: str = "grpc"): cardinality=1172, mandatory=True, ) - response = client.create_attribute(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateAttributeRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_attribute(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Attribute) @@ -11304,62 +9688,13 @@ def test_create_attribute(request_type, transport: str = "grpc"): assert response.mandatory is True -def test_create_attribute_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateAttributeRequest() - - -def test_create_attribute_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateAttributeRequest( - parent="parent_value", - attribute_id="attribute_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateAttributeRequest( - parent="parent_value", - attribute_id="attribute_id_value", - ) - - -def test_create_attribute_use_cached_wrapped_rpc(): +def test_create_attribute_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -11377,6 +9712,7 @@ def test_create_attribute_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_attribute ] = mock_rpc + request = {} client.create_attribute(request) @@ -11390,290 +9726,232 @@ def test_create_attribute_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_attribute_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - ) - response = await client.create_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateAttributeRequest() +def test_create_attribute_rest_required_fields( + request_type=apihub_service.CreateAttributeRequest, +): + transport_class = transports.ApiHubRestTransport + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) -@pytest.mark.asyncio -async def test_create_attribute_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify fields with default values are dropped - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Ensure method has been cached - assert ( - client._client._transport.create_attribute - in client._client._transport._wrapped_methods - ) + # verify required fields with default values are now present - # Replace cached wrapped function with mock - 
mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_attribute - ] = mock_rpc - - request = {} - await client.create_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_attribute(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_attribute._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("attribute_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_create_attribute_async( - transport: str = "grpc_asyncio", request_type=apihub_service.CreateAttributeRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - ) - response = await client.create_attribute(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateAttributeRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Attribute) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert ( - response.definition_type - == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED - ) - assert response.scope == common_fields.Attribute.Scope.API - assert response.data_type == common_fields.Attribute.DataType.ENUM - assert response.cardinality == 1172 - assert response.mandatory is True + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_attribute(request) -@pytest.mark.asyncio -async def test_create_attribute_async_from_dict(): - await test_create_attribute_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_attribute_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_attribute_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateAttributeRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - call.return_value = common_fields.Attribute() - client.create_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_attribute._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("attributeId",)) + & set( + ( + "parent", + "attribute", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_attribute_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_attribute_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_attribute" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_attribute" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateAttributeRequest.pb( + apihub_service.CreateAttributeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateAttributeRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Attribute.to_json( common_fields.Attribute() ) - await client.create_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_create_attribute_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.CreateAttributeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Attribute() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.create_attribute( - parent="parent_value", - attribute=common_fields.Attribute(name="name_value"), - attribute_id="attribute_id_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].attribute - mock_val = common_fields.Attribute(name="name_value") - assert arg == mock_val - arg = args[0].attribute_id - mock_val = "attribute_id_value" - assert arg == mock_val - - -def test_create_attribute_flattened_error(): + pre.assert_called_once() + post.assert_called_once() + + +def test_create_attribute_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateAttributeRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_attribute( - apihub_service.CreateAttributeRequest(), - parent="parent_value", - attribute=common_fields.Attribute(name="name_value"), - attribute_id="attribute_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_attribute(request) -@pytest.mark.asyncio -async def test_create_attribute_flattened_async(): - client = ApiHubAsyncClient( +def test_create_attribute_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_attribute( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", attribute=common_fields.Attribute(name="name_value"), attribute_id="attribute_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_attribute(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].attribute - mock_val = common_fields.Attribute(name="name_value") - assert arg == mock_val - arg = args[0].attribute_id - mock_val = "attribute_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_attribute_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/attributes" % client.transport._host, + args[1], + ) + + +def test_create_attribute_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_attribute( + client.create_attribute( apihub_service.CreateAttributeRequest(), parent="parent_value", attribute=common_fields.Attribute(name="name_value"), @@ -11681,6 +9959,12 @@ async def test_create_attribute_flattened_error_async(): ) +def test_create_attribute_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -11688,20 +9972,20 @@ async def test_create_attribute_flattened_error_async(): dict, ], ) -def test_get_attribute(request_type, transport: str = "grpc"): +def test_get_attribute_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute( name="name_value", display_name="display_name_value", description="description_value", @@ -11711,13 +9995,17 @@ def test_get_attribute(request_type, transport: str = "grpc"): cardinality=1172, mandatory=True, ) - response = client.get_attribute(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetAttributeRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_attribute(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Attribute) @@ -11734,60 +10022,13 @@ def test_get_attribute(request_type, transport: str = "grpc"): assert response.mandatory is True -def test_get_attribute_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetAttributeRequest() - - -def test_get_attribute_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetAttributeRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetAttributeRequest( - name="name_value", - ) - - -def test_get_attribute_use_cached_wrapped_rpc(): +def test_get_attribute_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -11803,6 +10044,7 @@ def test_get_attribute_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_attribute] = mock_rpc + request = {} client.get_attribute(request) @@ -11816,277 +10058,232 @@ def test_get_attribute_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_attribute_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - ) - response = await client.get_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetAttributeRequest() - - -@pytest.mark.asyncio -async def test_get_attribute_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_attribute_rest_required_fields( + request_type=apihub_service.GetAttributeRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_attribute - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_attribute - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_attribute(request) + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).get_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_attribute(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_attribute_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetAttributeRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - ) - response = await client.get_attribute(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetAttributeRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Attribute) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert ( - response.definition_type - == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED - ) - assert response.scope == common_fields.Attribute.Scope.API - assert response.data_type == common_fields.Attribute.DataType.ENUM - assert response.cardinality == 1172 - assert response.mandatory is True + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_attribute(request) -@pytest.mark.asyncio -async def test_get_attribute_async_from_dict(): - await test_get_attribute_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_attribute_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_attribute_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetAttributeRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - call.return_value = common_fields.Attribute() - client.get_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_attribute._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_attribute_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_attribute_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_attribute" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_attribute" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetAttributeRequest.pb( + apihub_service.GetAttributeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetAttributeRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Attribute.to_json( common_fields.Attribute() ) - await client.get_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_attribute_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.GetAttributeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Attribute() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_attribute( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_attribute_flattened_error(): +def test_get_attribute_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetAttributeRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_attribute( - apihub_service.GetAttributeRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_attribute(request) -@pytest.mark.asyncio -async def test_get_attribute_flattened_async(): - client = ApiHubAsyncClient( +def test_get_attribute_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Attribute() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_attribute( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/attributes/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_attribute(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/attributes/*}" % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_attribute_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_attribute_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_attribute( + client.get_attribute( apihub_service.GetAttributeRequest(), name="name_value", ) +def test_get_attribute_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -12094,20 +10291,109 @@ async def test_get_attribute_flattened_error_async(): dict, ], ) -def test_update_attribute(request_type, transport: str = "grpc"): +def test_update_attribute_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "attribute": {"name": "projects/sample1/locations/sample2/attributes/sample3"} + } + request_init["attribute"] = { + "name": "projects/sample1/locations/sample2/attributes/sample3", + "display_name": "display_name_value", + "description": "description_value", + "definition_type": 1, + "scope": 1, + "data_type": 1, + "allowed_values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ], + "cardinality": 1172, + "mandatory": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateAttributeRequest.meta.fields["attribute"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["attribute"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency 
+ # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["attribute"][field])): + del request_init["attribute"][field][i][subfield] + else: + del request_init["attribute"][field][subfield] + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute( name="name_value", display_name="display_name_value", description="description_value", @@ -12117,13 +10403,17 @@ def test_update_attribute(request_type, transport: str = "grpc"): cardinality=1172, mandatory=True, ) - response = client.update_attribute(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateAttributeRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_attribute(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Attribute) @@ -12140,56 +10430,13 @@ def test_update_attribute(request_type, transport: str = "grpc"): assert response.mandatory is True -def test_update_attribute_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateAttributeRequest() - - -def test_update_attribute_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateAttributeRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateAttributeRequest() - - -def test_update_attribute_use_cached_wrapped_rpc(): +def test_update_attribute_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -12207,6 +10454,7 @@ def test_update_attribute_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.update_attribute ] = mock_rpc + request = {} client.update_attribute(request) @@ -12220,287 +10468,245 @@ def test_update_attribute_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_attribute_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - ) - response = await client.update_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateAttributeRequest() - - -@pytest.mark.asyncio -async def test_update_attribute_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_update_attribute_rest_required_fields( + request_type=apihub_service.UpdateAttributeRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_attribute - in client._client._transport._wrapped_methods - ) + transport_class = transports.ApiHubRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_attribute - ] = mock_rpc + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - 
await client.update_attribute(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.update_attribute(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_attribute._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -@pytest.mark.asyncio -async def test_update_attribute_async( - transport: str = "grpc_asyncio", request_type=apihub_service.UpdateAttributeRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - ) - response = await client.update_attribute(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateAttributeRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Attribute) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert ( - response.definition_type - == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED - ) - assert response.scope == common_fields.Attribute.Scope.API - assert response.data_type == common_fields.Attribute.DataType.ENUM - assert response.cardinality == 1172 - assert response.mandatory is True + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_attribute(request) -@pytest.mark.asyncio -async def test_update_attribute_async_from_dict(): - await test_update_attribute_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_update_attribute_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_attribute_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateAttributeRequest() - - request.attribute.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - call.return_value = common_fields.Attribute() - client.update_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "attribute.name=name_value", - ) in kw["metadata"] + unset_fields = transport.update_attribute._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "attribute", + "updateMask", + ) + ) + ) -@pytest.mark.asyncio -async def test_update_attribute_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_attribute_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_attribute" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_attribute" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateAttributeRequest.pb( + apihub_service.UpdateAttributeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateAttributeRequest() - - request.attribute.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Attribute.to_json( common_fields.Attribute() ) - await client.update_attribute(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "attribute.name=name_value", - ) in kw["metadata"] - - -def test_update_attribute_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.UpdateAttributeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Attribute() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.update_attribute( - attribute=common_fields.Attribute(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].attribute - mock_val = common_fields.Attribute(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_update_attribute_flattened_error(): +def test_update_attribute_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateAttributeRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_attribute( - apihub_service.UpdateAttributeRequest(), - attribute=common_fields.Attribute(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # send a request that will satisfy transcoding + request_init = { + "attribute": {"name": "projects/sample1/locations/sample2/attributes/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_attribute(request) -@pytest.mark.asyncio -async def test_update_attribute_flattened_async(): - client = ApiHubAsyncClient( +def test_update_attribute_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_attribute( + # get arguments that satisfy an http rule for this method + sample_request = { + "attribute": { + "name": "projects/sample1/locations/sample2/attributes/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( attribute=common_fields.Attribute(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_attribute(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].attribute - mock_val = common_fields.Attribute(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_attribute_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{attribute.name=projects/*/locations/*/attributes/*}" + % client.transport._host, + args[1], + ) + + +def test_update_attribute_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_attribute( + client.update_attribute( apihub_service.UpdateAttributeRequest(), attribute=common_fields.Attribute(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) +def test_update_attribute_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -12508,86 +10714,41 @@ async def test_update_attribute_flattened_error_async(): dict, ], ) -def test_delete_attribute(request_type, transport: str = "grpc"): +def test_delete_attribute_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_attribute(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteAttributeRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_attribute(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_attribute_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteAttributeRequest() - - -def test_delete_attribute_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteAttributeRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteAttributeRequest( - name="name_value", - ) - - -def test_delete_attribute_use_cached_wrapped_rpc(): +def test_delete_attribute_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -12605,6 +10766,7 @@ def test_delete_attribute_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.delete_attribute ] = mock_rpc + request = {} client.delete_attribute(request) @@ -12618,240 +10780,219 @@ def test_delete_attribute_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio 
-async def test_delete_attribute_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteAttributeRequest() - - -@pytest.mark.asyncio -async def test_delete_attribute_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_delete_attribute_rest_required_fields( + request_type=apihub_service.DeleteAttributeRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_attribute - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_attribute - ] = mock_rpc - - request = {} - await client.delete_attribute(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_delete_attribute_async( - transport: str = "grpc_asyncio", request_type=apihub_service.DeleteAttributeRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_attribute(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteAttributeRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert response is None + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_delete_attribute_async_from_dict(): - await test_delete_attribute_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_delete_attribute_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteAttributeRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - call.return_value = None - client.delete_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_attribute_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteAttributeRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - request.name = "name_value" + response = client.delete_attribute(request) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_attribute(request) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] +def test_delete_attribute_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_attribute._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_attribute_flattened(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_attribute_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_attribute" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteAttributeRequest.pb( + apihub_service.DeleteAttributeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = apihub_service.DeleteAttributeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
client.delete_attribute( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() -def test_delete_attribute_flattened_error(): +def test_delete_attribute_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteAttributeRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_attribute( - apihub_service.DeleteAttributeRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_attribute(request) -@pytest.mark.asyncio -async def test_delete_attribute_flattened_async(): - client = ApiHubAsyncClient( +def test_delete_attribute_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/attributes/sample3" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_attribute( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_attribute(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/attributes/*}" % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_attribute_flattened_error_async(): - client = ApiHubAsyncClient( +def test_delete_attribute_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_attribute( + client.delete_attribute( apihub_service.DeleteAttributeRequest(), name="name_value", ) +def test_delete_attribute_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -12859,93 +11000,46 @@ async def test_delete_attribute_flattened_error_async(): dict, ], ) -def test_list_attributes(request_type, transport: str = "grpc"): +def test_list_attributes_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListAttributesResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListAttributesResponse( next_page_token="next_page_token_value", ) - response = client.list_attributes(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListAttributesRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListAttributesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_attributes(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAttributesPager) assert response.next_page_token == "next_page_token_value" -def test_list_attributes_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_attributes() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListAttributesRequest() - - -def test_list_attributes_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.ListAttributesRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_attributes(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListAttributesRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_attributes_use_cached_wrapped_rpc(): +def test_list_attributes_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -12961,6 +11055,7 @@ def test_list_attributes_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.list_attributes] = mock_rpc + request = {} client.list_attributes(request) @@ -12974,263 +11069,253 @@ def test_list_attributes_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_attributes_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListAttributesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_attributes() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListAttributesRequest() - - -@pytest.mark.asyncio -async def test_list_attributes_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_list_attributes_rest_required_fields( + request_type=apihub_service.ListAttributesRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.list_attributes - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_attributes - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.list_attributes(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).list_attributes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.list_attributes(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_attributes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_attributes_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListAttributesRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListAttributesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListAttributesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_attributes(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListAttributesRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = apihub_service.ListAttributesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAttributesAsyncPager) - assert response.next_page_token == "next_page_token_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_attributes(request) -@pytest.mark.asyncio -async def test_list_attributes_async_from_dict(): - await test_list_attributes_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_attributes_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_attributes_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListAttributesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - call.return_value = apihub_service.ListAttributesResponse() - client.list_attributes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.list_attributes._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_attributes_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_attributes_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_attributes" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_attributes" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListAttributesRequest.pb( + apihub_service.ListAttributesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListAttributesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListAttributesResponse.to_json( apihub_service.ListAttributesResponse() ) - await client.list_attributes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - -def test_list_attributes_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.ListAttributesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListAttributesResponse() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListAttributesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.list_attributes( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_attributes_flattened_error(): +def test_list_attributes_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListAttributesRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_attributes( - apihub_service.ListAttributesRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_attributes(request) -@pytest.mark.asyncio -async def test_list_attributes_flattened_async(): - client = ApiHubAsyncClient( +def test_list_attributes_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListAttributesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = apihub_service.ListAttributesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListAttributesResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_attributes( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListAttributesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_attributes(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/attributes" % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_attributes_flattened_error_async(): - client = ApiHubAsyncClient( +def test_list_attributes_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_attributes( + client.list_attributes( apihub_service.ListAttributesRequest(), parent="parent_value", ) -def test_list_attributes_pager(transport_name: str = "grpc"): +def test_list_attributes_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListAttributesResponse( attributes=[ common_fields.Attribute(), @@ -13255,262 +11340,80 @@ def test_list_attributes_pager(transport_name: str = "grpc"): common_fields.Attribute(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListAttributesResponse.to_json(x) for x in response ) - pager = client.list_attributes(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + sample_request = 
{"parent": "projects/sample1/locations/sample2"} + + pager = client.list_attributes(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.Attribute) for i in results) + pages = list(client.list_attributes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + -def test_list_attributes_pages(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.SearchResourcesRequest, + dict, + ], +) +def test_search_resources_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - common_fields.Attribute(), - ], - next_page_token="abc", - ), - apihub_service.ListAttributesResponse( - attributes=[], - next_page_token="def", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - ], - next_page_token="ghi", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - ], - ), - RuntimeError, + # send a request that will satisfy transcoding + request_init = {"location": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = apihub_service.SearchResourcesResponse( + next_page_token="next_page_token_value", ) - pages = list(client.list_attributes(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.SearchResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) -@pytest.mark.asyncio -async def test_list_attributes_async_pager(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_attributes), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - common_fields.Attribute(), - ], - next_page_token="abc", - ), - apihub_service.ListAttributesResponse( - attributes=[], - next_page_token="def", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - ], - next_page_token="ghi", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_attributes( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.Attribute) for i in responses) - - -@pytest.mark.asyncio -async def test_list_attributes_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual 
call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_attributes), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - common_fields.Attribute(), - ], - next_page_token="abc", - ), - apihub_service.ListAttributesResponse( - attributes=[], - next_page_token="def", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - ], - next_page_token="ghi", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_attributes(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.SearchResourcesRequest, - dict, - ], -) -def test_search_resources(request_type, transport: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = apihub_service.SearchResourcesResponse( - next_page_token="next_page_token_value", - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value response = client.search_resources(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.SearchResourcesRequest() - assert args[0] == request - # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchResourcesPager) assert response.next_page_token == "next_page_token_value" -def test_search_resources_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.search_resources() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.SearchResourcesRequest() - - -def test_search_resources_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.SearchResourcesRequest( - location="location_value", - query="query_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.search_resources(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.SearchResourcesRequest( - location="location_value", - query="query_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_search_resources_use_cached_wrapped_rpc(): +def test_search_resources_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -13528,6 +11431,7 @@ def test_search_resources_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.search_resources ] = mock_rpc + request = {} client.search_resources(request) @@ -13541,273 +11445,252 @@ def test_search_resources_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_search_resources_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.SearchResourcesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.search_resources() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.SearchResourcesRequest() - - -@pytest.mark.asyncio -async def test_search_resources_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_search_resources_rest_required_fields( + request_type=apihub_service.SearchResourcesRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.search_resources - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.search_resources - ] = mock_rpc - - request = {} - await client.search_resources(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.search_resources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_search_resources_async( - transport: str = "grpc_asyncio", request_type=apihub_service.SearchResourcesRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["location"] = "" + request_init["query"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.SearchResourcesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.search_resources(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.SearchResourcesRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_resources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.SearchResourcesAsyncPager) - assert response.next_page_token == "next_page_token_value" + # verify required fields with default values are now present + jsonified_request["location"] = "location_value" + jsonified_request["query"] = "query_value" -@pytest.mark.asyncio -async def test_search_resources_async_from_dict(): - await test_search_resources_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_resources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "location" in jsonified_request + assert jsonified_request["location"] == "location_value" + assert "query" in jsonified_request + assert jsonified_request["query"] == "query_value" -def test_search_resources_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.SearchResourcesRequest() - - request.location = "location_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - call.return_value = apihub_service.SearchResourcesResponse() - client.search_resources(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "location=location_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_search_resources_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Designate an appropriate value for the returned response. + return_value = apihub_service.SearchResourcesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.SearchResourcesRequest() + response_value = Response() + response_value.status_code = 200 - request.location = "location_value" + # Convert return value to protobuf type + return_value = apihub_service.SearchResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.SearchResourcesResponse() - ) - await client.search_resources(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.search_resources(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "location=location_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_search_resources_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_search_resources_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.SearchResourcesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.search_resources( - location="location_value", - query="query_value", + unset_fields = transport.search_resources._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "location", + "query", + ) ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].location - mock_val = "location_value" - assert arg == mock_val - arg = args[0].query - mock_val = "query_value" - assert arg == mock_val + ) -def test_search_resources_flattened_error(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_resources_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_search_resources" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_search_resources" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.SearchResourcesRequest.pb( + apihub_service.SearchResourcesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.SearchResourcesResponse.to_json( + apihub_service.SearchResourcesResponse() + ) + + request = apihub_service.SearchResourcesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.SearchResourcesResponse() + client.search_resources( - apihub_service.SearchResourcesRequest(), - location="location_value", - query="query_value", + request, + metadata=[ 
+ ("key", "val"), + ("cephalopod", "squid"), + ], ) + pre.assert_called_once() + post.assert_called_once() + -@pytest.mark.asyncio -async def test_search_resources_flattened_async(): - client = ApiHubAsyncClient( +def test_search_resources_rest_bad_request( + transport: str = "rest", request_type=apihub_service.SearchResourcesRequest +): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.SearchResourcesResponse() + # send a request that will satisfy transcoding + request_init = {"location": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.SearchResourcesResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.search_resources( + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.search_resources(request) + + +def test_search_resources_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = apihub_service.SearchResourcesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"location": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( location="location_value", query="query_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.SearchResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.search_resources(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].location - mock_val = "location_value" - assert arg == mock_val - arg = args[0].query - mock_val = "query_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_search_resources_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{location=projects/*/locations/*}:searchResources" + % client.transport._host, + args[1], + ) + + +def test_search_resources_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.search_resources( + client.search_resources( apihub_service.SearchResourcesRequest(), location="location_value", query="query_value", ) -def test_search_resources_pager(transport_name: str = "grpc"): +def test_search_resources_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.SearchResourcesResponse( search_results=[ apihub_service.SearchResult(), @@ -13832,162 +11715,29 @@ def test_search_resources_pager(transport_name: str = "grpc"): apihub_service.SearchResult(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("location", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.SearchResourcesResponse.to_json(x) for x in response ) - pager = client.search_resources(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"location": "projects/sample1/locations/sample2"} - assert 
pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.search_resources(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, apihub_service.SearchResult) for i in results) - -def test_search_resources_pages(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - next_page_token="abc", - ), - apihub_service.SearchResourcesResponse( - search_results=[], - next_page_token="def", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - ], - next_page_token="ghi", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - ), - RuntimeError, - ) - pages = list(client.search_resources(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_search_resources_async_pager(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_resources), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - next_page_token="abc", - ), - apihub_service.SearchResourcesResponse( - search_results=[], - next_page_token="def", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - ], - next_page_token="ghi", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - ), - RuntimeError, - ) - async_pager = await client.search_resources( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, apihub_service.SearchResult) for i in responses) - - -@pytest.mark.asyncio -async def test_search_resources_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_resources), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - next_page_token="abc", - ), - apihub_service.SearchResourcesResponse( - search_results=[], - next_page_token="def", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - ], - next_page_token="ghi", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.search_resources(request={}) - ).pages: - pages.append(page_) + pages = list(client.search_resources(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -13999,35 +11749,115 @@ async def test_search_resources_async_pages(): dict, ], ) -def test_create_external_api(request_type, transport: str = "grpc"): +def test_create_external_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["external_api"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "endpoints": ["endpoints_value1", "endpoints_value2"], + "paths": ["paths_value1", "paths_value2"], + "documentation": {"external_uri": "external_uri_value"}, + "attributes": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateExternalApiRequest.meta.fields["external_api"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["external_api"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["external_api"][field])): + del request_init["external_api"][field][i][subfield] + else: + del 
request_init["external_api"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi( + name="name_value", + display_name="display_name_value", + description="description_value", + endpoints=["endpoints_value"], paths=["paths_value"], ) - response = client.create_external_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateExternalApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_external_api(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.ExternalApi) @@ -14038,66 +11868,13 @@ def test_create_external_api(request_type, transport: str = "grpc"): assert response.paths == ["paths_value"] -def test_create_external_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateExternalApiRequest() - - -def test_create_external_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateExternalApiRequest( - parent="parent_value", - external_api_id="external_api_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_external_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateExternalApiRequest( - parent="parent_value", - external_api_id="external_api_id_value", - ) - - -def test_create_external_api_use_cached_wrapped_rpc(): +def test_create_external_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -14117,6 +11894,7 @@ def test_create_external_api_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_external_api ] = mock_rpc + request = {} client.create_external_api(request) @@ -14130,291 +11908,233 @@ def test_create_external_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_external_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - ) - response = await client.create_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateExternalApiRequest() - - -@pytest.mark.asyncio -async def test_create_external_api_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.create_external_api - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_external_api - ] = mock_rpc - - request = {} - await client.create_external_api(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.create_external_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_create_external_api_async( - transport: str = "grpc_asyncio", +def test_create_external_api_rest_required_fields( request_type=apihub_service.CreateExternalApiRequest, ): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + transport_class = transports.ApiHubRestTransport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - ) - response = await client.create_external_api(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateExternalApiRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.ExternalApi) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.endpoints == ["endpoints_value"] - assert response.paths == ["paths_value"] + # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" -@pytest.mark.asyncio -async def test_create_external_api_async_from_dict(): - await test_create_external_api_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_external_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("external_api_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -def test_create_external_api_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateExternalApiRequest() - - request.parent = "parent_value" + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - call.return_value = common_fields.ExternalApi() - client.create_external_api(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_external_api(request) -@pytest.mark.asyncio -async def test_create_external_api_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = apihub_service.CreateExternalApiRequest() - request.parent = "parent_value" +def test_create_external_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi() + unset_fields = transport.create_external_api._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("externalApiId",)) + & set( + ( + "parent", + "externalApi", + ) ) - await client.create_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + ) -def test_create_external_api_flattened(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_external_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.create_external_api( - parent="parent_value", - external_api=common_fields.ExternalApi(name="name_value"), - external_api_id="external_api_id_value", + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_external_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_external_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateExternalApiRequest.pb( + apihub_service.CreateExternalApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ExternalApi.to_json( + common_fields.ExternalApi() ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].external_api - mock_val = common_fields.ExternalApi(name="name_value") - assert arg == mock_val - arg = args[0].external_api_id - mock_val = "external_api_id_value" - assert arg == mock_val - - -def test_create_external_api_flattened_error(): + request = apihub_service.CreateExternalApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ExternalApi() + + client.create_external_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_external_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateExternalApiRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_external_api( - apihub_service.CreateExternalApiRequest(), - parent="parent_value", - external_api=common_fields.ExternalApi(name="name_value"), - external_api_id="external_api_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_external_api(request) -@pytest.mark.asyncio -async def test_create_external_api_flattened_async(): - client = ApiHubAsyncClient( +def test_create_external_api_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_external_api( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", external_api=common_fields.ExternalApi(name="name_value"), external_api_id="external_api_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_external_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].external_api - mock_val = common_fields.ExternalApi(name="name_value") - assert arg == mock_val - arg = args[0].external_api_id - mock_val = "external_api_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_external_api_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/externalApis" + % client.transport._host, + args[1], + ) + + +def test_create_external_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_external_api( + client.create_external_api( apihub_service.CreateExternalApiRequest(), parent="parent_value", external_api=common_fields.ExternalApi(name="name_value"), @@ -14422,6 +12142,12 @@ async def test_create_external_api_flattened_error_async(): ) +def test_create_external_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -14429,33 +12155,37 @@ async def test_create_external_api_flattened_error_async(): dict, ], ) -def test_get_external_api(request_type, transport: str = "grpc"): +def test_get_external_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi( name="name_value", display_name="display_name_value", description="description_value", endpoints=["endpoints_value"], paths=["paths_value"], ) - response = client.get_external_api(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetExternalApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_external_api(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.ExternalApi) @@ -14466,60 +12196,13 @@ def test_get_external_api(request_type, transport: str = "grpc"): assert response.paths == ["paths_value"] -def test_get_external_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetExternalApiRequest() - - -def test_get_external_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetExternalApiRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_external_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetExternalApiRequest( - name="name_value", - ) - - -def test_get_external_api_use_cached_wrapped_rpc(): +def test_get_external_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -14537,6 +12220,7 @@ def test_get_external_api_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_external_api ] = mock_rpc + request = {} client.get_external_api(request) @@ -14550,265 +12234,233 @@ def test_get_external_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_external_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - ) - response = await client.get_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetExternalApiRequest() - - -@pytest.mark.asyncio -async def test_get_external_api_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_external_api_rest_required_fields( + request_type=apihub_service.GetExternalApiRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_external_api - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_external_api - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_external_api(request) + unset_fields = transport_class( 
+ credentials=ga_credentials.AnonymousCredentials() + ).get_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_external_api(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_external_api_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetExternalApiRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - ) - response = await client.get_external_api(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetExternalApiRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.ExternalApi) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.endpoints == ["endpoints_value"] - assert response.paths == ["paths_value"] + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_external_api(request) -@pytest.mark.asyncio -async def test_get_external_api_async_from_dict(): - await test_get_external_api_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_external_api_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_external_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetExternalApiRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - call.return_value = common_fields.ExternalApi() - client.get_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_external_api._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_external_api_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_external_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_external_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_external_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetExternalApiRequest.pb( + apihub_service.GetExternalApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetExternalApiRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ExternalApi.to_json( common_fields.ExternalApi() ) - await client.get_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + request = apihub_service.GetExternalApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ExternalApi() -def test_get_external_api_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_external_api( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_external_api_flattened_error(): +def test_get_external_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetExternalApiRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_external_api( - apihub_service.GetExternalApiRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_external_api(request) -@pytest.mark.asyncio -async def test_get_external_api_flattened_async(): - client = ApiHubAsyncClient( +def test_get_external_api_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.ExternalApi() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_external_api( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_external_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/externalApis/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_external_api_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_external_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_external_api( + client.get_external_api( apihub_service.GetExternalApiRequest(), name="name_value", ) +def test_get_external_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -14816,35 +12468,119 @@ async def test_get_external_api_flattened_error_async(): dict, ], ) -def test_update_external_api(request_type, transport: str = "grpc"): +def test_update_external_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "external_api": { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + } + request_init["external_api"] = { + "name": "projects/sample1/locations/sample2/externalApis/sample3", + "display_name": "display_name_value", + "description": "description_value", + "endpoints": ["endpoints_value1", "endpoints_value2"], + "paths": ["paths_value1", "paths_value2"], + "documentation": {"external_uri": "external_uri_value"}, + "attributes": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.ExternalApi( + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateExternalApiRequest.meta.fields["external_api"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["external_api"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present 
at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["external_api"][field])): + del request_init["external_api"][field][i][subfield] + else: + del request_init["external_api"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi( name="name_value", display_name="display_name_value", description="description_value", endpoints=["endpoints_value"], paths=["paths_value"], ) - response = client.update_external_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateExternalApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_external_api(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.ExternalApi) @@ -14855,60 +12591,13 @@ def test_update_external_api(request_type, transport: str = "grpc"): assert response.paths == ["paths_value"] -def test_update_external_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateExternalApiRequest() - - -def test_update_external_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateExternalApiRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.update_external_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateExternalApiRequest() - - -def test_update_external_api_use_cached_wrapped_rpc(): +def test_update_external_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -14928,6 +12617,7 @@ def test_update_external_api_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.update_external_api ] = mock_rpc + request = {} client.update_external_api(request) @@ -14941,225 +12631,229 @@ def test_update_external_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_external_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - ) - response = await client.update_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateExternalApiRequest() - - -@pytest.mark.asyncio -async def test_update_external_api_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_update_external_api_rest_required_fields( + request_type=apihub_service.UpdateExternalApiRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_external_api - in client._client._transport._wrapped_methods - ) + transport_class = transports.ApiHubRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_external_api - ] = mock_rpc + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.update_external_api(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.update_external_api(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_external_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -@pytest.mark.asyncio -async def test_update_external_api_async( - transport: str = "grpc_asyncio", - request_type=apihub_service.UpdateExternalApiRequest, -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - ) - response = await client.update_external_api(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateExternalApiRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.ExternalApi) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.endpoints == ["endpoints_value"] - assert response.paths == ["paths_value"] + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_external_api(request) -@pytest.mark.asyncio -async def test_update_external_api_async_from_dict(): - await test_update_external_api_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_update_external_api_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_external_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateExternalApiRequest() + unset_fields = transport.update_external_api._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "externalApi", + "updateMask", + ) + ) + ) - request.external_api.name = "name_value" - # Mock the actual call within the gRPC stub, and fake the request. 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_external_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - call.return_value = common_fields.ExternalApi() - client.update_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_external_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_external_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateExternalApiRequest.pb( + apihub_service.UpdateExternalApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "external_api.name=name_value", - ) in kw["metadata"] + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ExternalApi.to_json( + common_fields.ExternalApi() + ) + request = apihub_service.UpdateExternalApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ExternalApi() -@pytest.mark.asyncio -async def test_update_external_api_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client.update_external_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateExternalApiRequest() + pre.assert_called_once() + post.assert_called_once() - request.external_api.name = "name_value" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi() - ) - await client.update_external_api(request) +def test_update_external_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateExternalApiRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + # send a request that will satisfy transcoding + request_init = { + "external_api": { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + } + request = request_type(**request_init) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "external_api.name=name_value", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_external_api(request) -def test_update_external_api_flattened(): +def test_update_external_api_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_external_api( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.ExternalApi() + + # get arguments that satisfy an http rule for this method + sample_request = { + "external_api": { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( external_api=common_fields.ExternalApi(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_external_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].external_api - mock_val = common_fields.ExternalApi(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{external_api.name=projects/*/locations/*/externalApis/*}" + % client.transport._host, + args[1], + ) -def test_update_external_api_flattened_error(): +def test_update_external_api_rest_flattened_error(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -15172,56 +12866,11 @@ def test_update_external_api_flattened_error(): ) -@pytest.mark.asyncio -async def test_update_external_api_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) 
- - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_external_api( - external_api=common_fields.ExternalApi(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].external_api - mock_val = common_fields.ExternalApi(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_external_api_flattened_error_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_external_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_external_api( - apihub_service.UpdateExternalApiRequest(), - external_api=common_fields.ExternalApi(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - @pytest.mark.parametrize( "request_type", @@ -15230,92 +12879,41 @@ async def test_update_external_api_flattened_error_async(): dict, ], ) -def test_delete_external_api(request_type, transport: str = "grpc"): +def test_delete_external_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_external_api(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteExternalApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_external_api(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_external_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteExternalApiRequest() - - -def test_delete_external_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteExternalApiRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_external_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteExternalApiRequest( - name="name_value", - ) - - -def test_delete_external_api_use_cached_wrapped_rpc(): +def test_delete_external_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -15335,6 +12933,7 @@ def test_delete_external_api_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.delete_external_api ] = mock_rpc + request = {} client.delete_external_api(request) @@ -15348,253 +12947,220 @@ def test_delete_external_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_external_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteExternalApiRequest() - - -@pytest.mark.asyncio -async def test_delete_external_api_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_external_api - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_external_api - ] = mock_rpc - - request = {} - await client.delete_external_api(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_external_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_external_api_async( - transport: str = "grpc_asyncio", +def test_delete_external_api_rest_required_fields( request_type=apihub_service.DeleteExternalApiRequest, ): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + transport_class = transports.ApiHubRestTransport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_external_api(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteExternalApiRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert response is None + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_delete_external_api_async_from_dict(): - await test_delete_external_api_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_delete_external_api_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteExternalApiRequest() + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - request.name = "name_value" + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - call.return_value = None - client.delete_external_api(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_external_api(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_delete_external_api_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_external_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteExternalApiRequest() + unset_fields = transport.delete_external_api._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) - request.name = "name_value" - # Mock the actual call within the gRPC stub, and fake the request. 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_external_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_external_api" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteExternalApiRequest.pb( + apihub_service.DeleteExternalApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() -def test_delete_external_api_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.DeleteExternalApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.delete_external_api( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() -def test_delete_external_api_flattened_error(): +def test_delete_external_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteExternalApiRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_external_api( - apihub_service.DeleteExternalApiRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_external_api(request) -@pytest.mark.asyncio -async def test_delete_external_api_flattened_async(): - client = ApiHubAsyncClient( +def test_delete_external_api_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_external_api( - name="name_value", - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_external_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/externalApis/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_external_api_flattened_error_async(): - client = ApiHubAsyncClient( +def test_delete_external_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_external_api( + client.delete_external_api( apihub_service.DeleteExternalApiRequest(), name="name_value", ) +def test_delete_external_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -15602,97 +13168,46 @@ async def test_delete_external_api_flattened_error_async(): dict, ], ) -def test_list_external_apis(request_type, transport: str = "grpc"): +def test_list_external_apis_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = apihub_service.ListExternalApisResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListExternalApisResponse( next_page_token="next_page_token_value", ) - response = client.list_external_apis(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListExternalApisRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListExternalApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_external_apis(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListExternalApisPager) assert response.next_page_token == "next_page_token_value" -def test_list_external_apis_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.list_external_apis() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListExternalApisRequest() - - -def test_list_external_apis_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.ListExternalApisRequest( - parent="parent_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.list_external_apis(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListExternalApisRequest( - parent="parent_value", - page_token="page_token_value", - ) - - -def test_list_external_apis_use_cached_wrapped_rpc(): +def test_list_external_apis_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -15712,6 +13227,7 @@ def test_list_external_apis_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.list_external_apis ] = mock_rpc + request = {} client.list_external_apis(request) @@ -15725,277 +13241,252 @@ def test_list_external_apis_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_external_apis_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListExternalApisResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_external_apis() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListExternalApisRequest() - - -@pytest.mark.asyncio -async def test_list_external_apis_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_list_external_apis_rest_required_fields( + request_type=apihub_service.ListExternalApisRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.list_external_apis - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_external_apis - ] = mock_rpc - - request = {} - await client.list_external_apis(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.list_external_apis(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_list_external_apis_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListExternalApisRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListExternalApisResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_external_apis(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListExternalApisRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_external_apis._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListExternalApisAsyncPager) - assert response.next_page_token == "next_page_token_value" + # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" -@pytest.mark.asyncio -async def test_list_external_apis_async_from_dict(): - await test_list_external_apis_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_external_apis._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -def test_list_external_apis_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListExternalApisRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - call.return_value = apihub_service.ListExternalApisResponse() - client.list_external_apis(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_external_apis_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListExternalApisResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListExternalApisRequest() + response_value = Response() + response_value.status_code = 200 - request.parent = "parent_value" + # Convert return value to protobuf type + return_value = apihub_service.ListExternalApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListExternalApisResponse() - ) - await client.list_external_apis(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.list_external_apis(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_external_apis_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_external_apis_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListExternalApisResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_external_apis( - parent="parent_value", + unset_fields = transport.list_external_apis._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_external_apis_flattened_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), + & set(("parent",)) ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_external_apis( - apihub_service.ListExternalApisRequest(), - parent="parent_value", - ) - -@pytest.mark.asyncio -async def test_list_external_apis_flattened_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_external_apis_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = apihub_service.ListExternalApisResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListExternalApisResponse() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_external_apis" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_external_apis" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListExternalApisRequest.pb( + apihub_service.ListExternalApisRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListExternalApisResponse.to_json( + apihub_service.ListExternalApisResponse() + ) + + request = apihub_service.ListExternalApisRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListExternalApisResponse() + + client.list_external_apis( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.list_external_apis( + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_external_apis_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListExternalApisRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_external_apis(request) + + +def test_list_external_apis_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = apihub_service.ListExternalApisResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListExternalApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_external_apis(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/externalApis" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_external_apis_flattened_error_async(): - client = ApiHubAsyncClient( +def test_list_external_apis_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_external_apis( + client.list_external_apis( apihub_service.ListExternalApisRequest(), parent="parent_value", ) -def test_list_external_apis_pager(transport_name: str = "grpc"): +def test_list_external_apis_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListExternalApisResponse( external_apis=[ common_fields.ExternalApi(), @@ -16020,15040 +13511,1132 @@ def test_list_external_apis_pager(transport_name: str = "grpc"): common_fields.ExternalApi(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListExternalApisResponse.to_json(x) for x in response ) - pager = client.list_external_apis(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert 
pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.list_external_apis(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.ExternalApi) for i in results) + pages = list(client.list_external_apis(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_list_external_apis_pages(transport_name: str = "grpc"): - client = ApiHubClient( + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - next_page_token="abc", - ), - apihub_service.ListExternalApisResponse( - external_apis=[], - next_page_token="def", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - ], - next_page_token="ghi", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - ), - RuntimeError, + with pytest.raises(ValueError): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - pages = list(client.list_external_apis(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_external_apis_async_pager(): - client = ApiHubAsyncClient( + # It 
is an error to provide a credentials file and a transport instance. + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) + with pytest.raises(ValueError): + client = ApiHubClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - next_page_token="abc", - ), - apihub_service.ListExternalApisResponse( - external_apis=[], - next_page_token="def", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - ], - next_page_token="ghi", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - ), - RuntimeError, + # It is an error to provide an api_key and a transport instance. + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ApiHubClient( + client_options=options, + transport=transport, ) - async_pager = await client.list_external_apis( - request={}, + + # It is an error to provide an api_key and a credential. 
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ApiHubClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - assert len(responses) == 6 - assert all(isinstance(i, common_fields.ExternalApi) for i in responses) + # It is an error to provide scopes and a transport instance. + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ApiHubClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) -@pytest.mark.asyncio -async def test_list_external_apis_async_pages(): - client = ApiHubAsyncClient( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - next_page_token="abc", - ), - apihub_service.ListExternalApisResponse( - external_apis=[], - next_page_token="def", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - ], - next_page_token="ghi", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_external_apis(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateApiRequest, - dict, - ], -) -def test_create_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["api"] = { - "name": "name_value", - "display_name": "display_name_value", - "description": "description_value", - "documentation": {"external_uri": "external_uri_value"}, - "owner": {"display_name": "display_name_value", "email": "email_value"}, - "versions": ["versions_value1", "versions_value2"], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "target_user": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": 
["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "team": {}, - "business_unit": {}, - "maturity_level": {}, - "attributes": {}, - "api_style": {}, - "selected_version": "selected_version_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateApiRequest.meta.fields["api"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["api"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result 
and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["api"][field])): - del request_init["api"][field][i][subfield] - else: - del request_init["api"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_api(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Api) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.versions == ["versions_value"] - assert response.selected_version == "selected_version_value" - - -def test_create_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_api in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.create_api] = mock_rpc - - request = {} - client.create_api(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_api_rest_required_fields(request_type=apihub_service.CreateApiRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_api._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("api_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Api() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_api._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("apiId",)) - & set( - ( - "parent", - "api", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_create_api" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_create_api" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
apihub_service.CreateApiRequest.pb( - apihub_service.CreateApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Api.to_json(common_fields.Api()) - - request = apihub_service.CreateApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Api() - - client.create_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_api(request) - - -def test_create_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Api() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - api=common_fields.Api(name="name_value"), - api_id="api_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/apis" % client.transport._host, - args[1], - ) - - -def test_create_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_api( - apihub_service.CreateApiRequest(), - parent="parent_value", - api=common_fields.Api(name="name_value"), - api_id="api_id_value", - ) - - -def test_create_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetApiRequest, - dict, - ], -) -def test_get_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_api(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Api) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.versions == ["versions_value"] - assert response.selected_version == "selected_version_value" - - -def test_get_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_api in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_api] = mock_rpc - - request = {} - client.get_api(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_api_rest_required_fields(request_type=apihub_service.GetApiRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Api() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_api._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_api" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_api" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetApiRequest.pb(apihub_service.GetApiRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = 
PreparedRequest() - req.return_value._content = common_fields.Api.to_json(common_fields.Api()) - - request = apihub_service.GetApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Api() - - client.get_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_api(request) - - -def test_get_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Api() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/apis/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*}" % client.transport._host, - args[1], - ) - - -def test_get_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_api( - apihub_service.GetApiRequest(), - name="name_value", - ) - - -def test_get_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListApisRequest, - dict, - ], -) -def test_list_apis_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListApisResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListApisResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_apis(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListApisPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_apis_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_apis in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_apis] = mock_rpc - - request = {} - client.list_apis(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_apis(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_apis_rest_required_fields(request_type=apihub_service.ListApisRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_apis._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_apis._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListApisResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListApisResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_apis(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_apis_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_apis._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_apis_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - 
transports.ApiHubRestInterceptor, "post_list_apis" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_apis" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListApisRequest.pb(apihub_service.ListApisRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListApisResponse.to_json( - apihub_service.ListApisResponse() - ) - - request = apihub_service.ListApisRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListApisResponse() - - client.list_apis( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_apis_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListApisRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_apis(request) - - -def test_list_apis_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListApisResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListApisResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_apis(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/apis" % client.transport._host, - args[1], - ) - - -def test_list_apis_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_apis( - apihub_service.ListApisRequest(), - parent="parent_value", - ) - - -def test_list_apis_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - common_fields.Api(), - ], - next_page_token="abc", - ), - apihub_service.ListApisResponse( - apis=[], - next_page_token="def", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - ], - next_page_token="ghi", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(apihub_service.ListApisResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_apis(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.Api) for i in results) - - pages = list(client.list_apis(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateApiRequest, - dict, - ], -) -def test_update_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"api": {"name": "projects/sample1/locations/sample2/apis/sample3"}} - request_init["api"] = { - "name": "projects/sample1/locations/sample2/apis/sample3", - "display_name": "display_name_value", - "description": "description_value", - "documentation": {"external_uri": 
"external_uri_value"}, - "owner": {"display_name": "display_name_value", "email": "email_value"}, - "versions": ["versions_value1", "versions_value2"], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "target_user": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "team": {}, - "business_unit": {}, - "maturity_level": {}, - "attributes": {}, - "api_style": {}, - "selected_version": "selected_version_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateApiRequest.meta.fields["api"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["api"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["api"][field])): - del request_init["api"][field][i][subfield] - else: - del 
request_init["api"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_api(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Api) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.versions == ["versions_value"] - assert response.selected_version == "selected_version_value" - - -def test_update_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_api in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # 
operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.update_api] = mock_rpc - - request = {} - client.update_api(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_api_rest_required_fields(request_type=apihub_service.UpdateApiRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_api._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Api() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_api._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "api", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, 
mock.patch.object( - transports.ApiHubRestInterceptor, "post_update_api" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_update_api" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.UpdateApiRequest.pb( - apihub_service.UpdateApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Api.to_json(common_fields.Api()) - - request = apihub_service.UpdateApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Api() - - client.update_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"api": {"name": "projects/sample1/locations/sample2/apis/sample3"}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_api(request) - - -def test_update_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Api() - - # get arguments that satisfy an http rule for this method - sample_request = { - "api": {"name": "projects/sample1/locations/sample2/apis/sample3"} - } - - # get truthy value for each flattened field - mock_args = dict( - api=common_fields.Api(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{api.name=projects/*/locations/*/apis/*}" % client.transport._host, - args[1], - ) - - -def test_update_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_api( - apihub_service.UpdateApiRequest(), - api=common_fields.Api(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteApiRequest, - dict, - ], -) -def test_delete_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_api(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_api in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.delete_api] = mock_rpc - - request = {} - client.delete_api(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_api_rest_required_fields(request_type=apihub_service.DeleteApiRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - 
credentials=ga_credentials.AnonymousCredentials() - ).delete_api._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("force",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_api._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force",)) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_delete_api" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteApiRequest.pb( - apihub_service.DeleteApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_api( - request, - 
metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_api(request) - - -def test_delete_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/apis/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*}" % client.transport._host, - args[1], - ) - - -def test_delete_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_api( - apihub_service.DeleteApiRequest(), - name="name_value", - ) - - -def test_delete_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateVersionRequest, - dict, - ], -) -def test_create_version_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - request_init["version"] = { - "name": "name_value", - "display_name": "display_name_value", - "description": "description_value", - "documentation": {"external_uri": "external_uri_value"}, - "specs": ["specs_value1", "specs_value2"], - "api_operations": ["api_operations_value1", "api_operations_value2"], - "definitions": ["definitions_value1", "definitions_value2"], - "deployments": ["deployments_value1", "deployments_value2"], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "lifecycle": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "compliance": {}, - "accreditation": {}, - "attributes": {}, - 
"selected_deployment": "selected_deployment_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateVersionRequest.meta.fields["version"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["version"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - 
"subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["version"][field])): - del request_init["version"][field][i][subfield] - else: - del request_init["version"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_version(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Version) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.specs == ["specs_value"] - assert response.api_operations == ["api_operations_value"] - assert response.definitions == ["definitions_value"] - assert response.deployments == ["deployments_value"] - assert response.selected_deployment == "selected_deployment_value" - - -def test_create_version_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_version in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.create_version] = mock_rpc - - request = {} - client.create_version(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_version(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_version_rest_required_fields( - request_type=apihub_service.CreateVersionRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_version._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_version._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("version_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Version() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_version(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_version_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_version._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("versionId",)) - & set( - ( - "parent", - "version", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_version_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_create_version" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_create_version" - ) as pre: - pre.assert_not_called() - 
post.assert_not_called() - pb_message = apihub_service.CreateVersionRequest.pb( - apihub_service.CreateVersionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Version.to_json( - common_fields.Version() - ) - - request = apihub_service.CreateVersionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Version() - - client.create_version( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_version_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateVersionRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_version(request) - - -def test_create_version_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Version() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - version=common_fields.Version(name="name_value"), - version_id="version_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_version(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/apis/*}/versions" - % client.transport._host, - args[1], - ) - - -def test_create_version_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_version( - apihub_service.CreateVersionRequest(), - parent="parent_value", - version=common_fields.Version(name="name_value"), - version_id="version_id_value", - ) - - -def test_create_version_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetVersionRequest, - dict, - ], -) -def test_get_version_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_version(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Version) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.specs == ["specs_value"] - assert response.api_operations == ["api_operations_value"] - assert response.definitions == ["definitions_value"] - assert response.deployments == ["deployments_value"] - assert response.selected_deployment == "selected_deployment_value" - - -def test_get_version_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_version in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_version] = mock_rpc - - request = {} - client.get_version(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_version(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_version_rest_required_fields( - request_type=apihub_service.GetVersionRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_version._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_version._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Version() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_version(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_version_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_version._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_version_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_version" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_version" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetVersionRequest.pb( - apihub_service.GetVersionRequest() - ) - 
transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Version.to_json( - common_fields.Version() - ) - - request = apihub_service.GetVersionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Version() - - client.get_version( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_version_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetVersionRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_version(request) - - -def test_get_version_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Version() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_version(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*}" - % client.transport._host, - args[1], - ) - - -def test_get_version_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_version( - apihub_service.GetVersionRequest(), - name="name_value", - ) - - -def test_get_version_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListVersionsRequest, - dict, - ], -) -def test_list_versions_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListVersionsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListVersionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_versions(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListVersionsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_versions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_versions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_versions] = mock_rpc - - request = {} - client.list_versions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_versions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_versions_rest_required_fields( - request_type=apihub_service.ListVersionsRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_versions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_versions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListVersionsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListVersionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_versions(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_versions_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_versions._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_versions_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, 
mock.patch.object( - transports.ApiHubRestInterceptor, "post_list_versions" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_versions" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListVersionsRequest.pb( - apihub_service.ListVersionsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListVersionsResponse.to_json( - apihub_service.ListVersionsResponse() - ) - - request = apihub_service.ListVersionsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListVersionsResponse() - - client.list_versions( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_versions_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListVersionsRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_versions(request) - - -def test_list_versions_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListVersionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListVersionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_versions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/apis/*}/versions" - % client.transport._host, - args[1], - ) - - -def test_list_versions_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_versions( - apihub_service.ListVersionsRequest(), - parent="parent_value", - ) - - -def test_list_versions_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - common_fields.Version(), - ], - next_page_token="abc", - ), - apihub_service.ListVersionsResponse( - versions=[], - next_page_token="def", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - ], - next_page_token="ghi", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.ListVersionsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - 
req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - - pager = client.list_versions(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.Version) for i in results) - - pages = list(client.list_versions(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateVersionRequest, - dict, - ], -) -def test_update_version_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "version": { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - } - request_init["version"] = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4", - "display_name": "display_name_value", - "description": "description_value", - "documentation": {"external_uri": "external_uri_value"}, - "specs": ["specs_value1", "specs_value2"], - "api_operations": ["api_operations_value1", "api_operations_value2"], - "definitions": ["definitions_value1", "definitions_value2"], - "deployments": ["deployments_value1", "deployments_value2"], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "lifecycle": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "compliance": {}, - "accreditation": {}, - "attributes": {}, - "selected_deployment": "selected_deployment_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateVersionRequest.meta.fields["version"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["version"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # 
Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["version"][field])): - del request_init["version"][field][i][subfield] - else: - del request_init["version"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_version(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Version) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.specs == ["specs_value"] - assert response.api_operations == ["api_operations_value"] - assert response.definitions == ["definitions_value"] - assert response.deployments == ["deployments_value"] - assert response.selected_deployment == "selected_deployment_value" - - -def test_update_version_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_version in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.update_version] = mock_rpc - - request = {} - client.update_version(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_version(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_version_rest_required_fields( - request_type=apihub_service.UpdateVersionRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_version._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_version._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Version() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_version(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_version_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_version._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "version", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_version_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_update_version" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_update_version" - ) as pre: - pre.assert_not_called() - 
post.assert_not_called() - pb_message = apihub_service.UpdateVersionRequest.pb( - apihub_service.UpdateVersionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Version.to_json( - common_fields.Version() - ) - - request = apihub_service.UpdateVersionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Version() - - client.update_version( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_version_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateVersionRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "version": { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_version(request) - - -def test_update_version_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Version() - - # get arguments that satisfy an http rule for this method - sample_request = { - "version": { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - } - - # get truthy value for each flattened field - mock_args = dict( - version=common_fields.Version(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_version(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{version.name=projects/*/locations/*/apis/*/versions/*}" - % client.transport._host, - args[1], - ) - - -def test_update_version_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_version( - apihub_service.UpdateVersionRequest(), - version=common_fields.Version(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_version_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteVersionRequest, - dict, - ], -) -def test_delete_version_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_version(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_version_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_version in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.delete_version] = mock_rpc - - request = {} - client.delete_version(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_version(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_version_rest_required_fields( - request_type=apihub_service.DeleteVersionRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_version._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - 
credentials=ga_credentials.AnonymousCredentials() - ).delete_version._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("force",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_version(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_version_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_version._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force",)) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_version_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_delete_version" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteVersionRequest.pb( - apihub_service.DeleteVersionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteVersionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - 
client.delete_version( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_version_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteVersionRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_version(request) - - -def test_delete_version_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_version(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_version_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_version( - apihub_service.DeleteVersionRequest(), - name="name_value", - ) - - -def test_delete_version_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateSpecRequest, - dict, - ], -) -def test_create_spec_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request_init["spec"] = { - "name": "name_value", - "display_name": "display_name_value", - "spec_type": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, - "details": { - "open_api_spec_details": { - "format_": 1, - "version": "version_value", - "owner": {"display_name": "display_name_value", "email": "email_value"}, - }, - "description": "description_value", - }, - "source_uri": "source_uri_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "lint_response": { - "issues": [ - { - "code": 
"code_value", - "path": ["path_value1", "path_value2"], - "message": "message_value", - "severity": 1, - "range_": {"start": {"line": 424, "character": 941}, "end": {}}, - } - ], - "summary": [{"severity": 1, "count": 553}], - "state": 1, - "source": "source_value", - "linter": 1, - "create_time": {}, - }, - "attributes": {}, - "documentation": {"external_uri": "external_uri_value"}, - "parsing_mode": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateSpecRequest.meta.fields["spec"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["spec"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["spec"][field])): - del request_init["spec"][field][i][subfield] - else: - del 
request_init["spec"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_spec(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED - - -def test_create_spec_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[client._transport.create_spec] = mock_rpc - - request = {} - client.create_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_spec_rest_required_fields( - request_type=apihub_service.CreateSpecRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_spec._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("spec_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_spec(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_spec_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_spec._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("specId",)) - & set( - ( - "parent", - "spec", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_spec_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, 
mock.patch.object( - transports.ApiHubRestInterceptor, "post_create_spec" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_create_spec" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.CreateSpecRequest.pb( - apihub_service.CreateSpecRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) - - request = apihub_service.CreateSpecRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Spec() - - client.create_spec( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_spec_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateSpecRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_spec(request) - - -def test_create_spec_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - spec=common_fields.Spec(name="name_value"), - spec_id="spec_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_spec(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/specs" - % client.transport._host, - args[1], - ) - - -def test_create_spec_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_spec( - apihub_service.CreateSpecRequest(), - parent="parent_value", - spec=common_fields.Spec(name="name_value"), - spec_id="spec_id_value", - ) - - -def test_create_spec_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetSpecRequest, - dict, - ], -) -def test_get_spec_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_spec(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED - - -def test_get_spec_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_spec] = mock_rpc - - request = {} - client.get_spec(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_spec_rest_required_fields(request_type=apihub_service.GetSpecRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_spec(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_spec_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_spec._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_spec_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_spec" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_spec" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetSpecRequest.pb(apihub_service.GetSpecRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - 
req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) - - request = apihub_service.GetSpecRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Spec() - - client.get_spec( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_spec_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetSpecRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_spec(request) - - -def test_get_spec_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Spec() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_spec(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}" - % client.transport._host, - args[1], - ) - - -def test_get_spec_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_spec( - apihub_service.GetSpecRequest(), - name="name_value", - ) - - -def test_get_spec_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetSpecContentsRequest, - dict, - ], -) -def test_get_spec_contents_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.SpecContents( - contents=b"contents_blob", - mime_type="mime_type_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.SpecContents.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_spec_contents(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.SpecContents) - assert response.contents == b"contents_blob" - assert response.mime_type == "mime_type_value" - - -def test_get_spec_contents_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_spec_contents in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_spec_contents - ] = mock_rpc - - request = {} - client.get_spec_contents(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_spec_contents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_spec_contents_rest_required_fields( - request_type=apihub_service.GetSpecContentsRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_spec_contents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_spec_contents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.SpecContents() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.SpecContents.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_spec_contents(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_spec_contents_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_spec_contents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_spec_contents_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_spec_contents" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_spec_contents" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetSpecContentsRequest.pb( 
- apihub_service.GetSpecContentsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.SpecContents.to_json( - common_fields.SpecContents() - ) - - request = apihub_service.GetSpecContentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.SpecContents() - - client.get_spec_contents( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_spec_contents_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetSpecContentsRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_spec_contents(request) - - -def test_get_spec_contents_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.SpecContents() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.SpecContents.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_spec_contents(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}:contents" - % client.transport._host, - args[1], - ) - - -def test_get_spec_contents_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_spec_contents( - apihub_service.GetSpecContentsRequest(), - name="name_value", - ) - - -def test_get_spec_contents_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListSpecsRequest, - dict, - ], -) -def test_list_specs_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListSpecsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListSpecsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_specs(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSpecsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_specs_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_specs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_specs] = mock_rpc - - request = {} - client.list_specs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_specs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_specs_rest_required_fields(request_type=apihub_service.ListSpecsRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_specs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_specs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListSpecsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListSpecsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_specs(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_specs_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_specs._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_specs_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - 
transports.ApiHubRestInterceptor, "post_list_specs" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_specs" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListSpecsRequest.pb( - apihub_service.ListSpecsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListSpecsResponse.to_json( - apihub_service.ListSpecsResponse() - ) - - request = apihub_service.ListSpecsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListSpecsResponse() - - client.list_specs( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_specs_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListSpecsRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_specs(request) - - -def test_list_specs_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListSpecsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListSpecsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_specs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/specs" - % client.transport._host, - args[1], - ) - - -def test_list_specs_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_specs( - apihub_service.ListSpecsRequest(), - parent="parent_value", - ) - - -def test_list_specs_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - common_fields.Spec(), - ], - next_page_token="abc", - ), - apihub_service.ListSpecsResponse( - specs=[], - next_page_token="def", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - ], - next_page_token="ghi", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(apihub_service.ListSpecsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - 
"parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - pager = client.list_specs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.Spec) for i in results) - - pages = list(client.list_specs(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateSpecRequest, - dict, - ], -) -def test_update_spec_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "spec": { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - } - request_init["spec"] = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5", - "display_name": "display_name_value", - "spec_type": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, - "details": { - "open_api_spec_details": { - "format_": 1, - "version": "version_value", - "owner": {"display_name": "display_name_value", "email": "email_value"}, - }, - "description": "description_value", - }, - "source_uri": "source_uri_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "lint_response": { - "issues": [ - { - "code": "code_value", - "path": ["path_value1", "path_value2"], - "message": "message_value", - "severity": 1, - "range_": {"start": {"line": 424, "character": 941}, "end": {}}, - } - ], - "summary": [{"severity": 1, "count": 
553}], - "state": 1, - "source": "source_value", - "linter": 1, - "create_time": {}, - }, - "attributes": {}, - "documentation": {"external_uri": "external_uri_value"}, - "parsing_mode": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateSpecRequest.meta.fields["spec"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["spec"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in 
result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["spec"][field])): - del request_init["spec"][field][i][subfield] - else: - del request_init["spec"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_spec(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED - - -def test_update_spec_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.update_spec] = mock_rpc - - request = {} - client.update_spec(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_spec_rest_required_fields( - request_type=apihub_service.UpdateSpecRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_spec._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_spec(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_spec_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_spec._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "spec", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_spec_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_update_spec" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_update_spec" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.UpdateSpecRequest.pb( - apihub_service.UpdateSpecRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - 
"query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) - - request = apihub_service.UpdateSpecRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Spec() - - client.update_spec( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_spec_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateSpecRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "spec": { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_spec(request) - - -def test_update_spec_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Spec() - - # get arguments that satisfy an http rule for this method - sample_request = { - "spec": { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - } - - # get truthy value for each flattened field - mock_args = dict( - spec=common_fields.Spec(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_spec(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{spec.name=projects/*/locations/*/apis/*/versions/*/specs/*}" - % client.transport._host, - args[1], - ) - - -def test_update_spec_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_spec( - apihub_service.UpdateSpecRequest(), - spec=common_fields.Spec(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_spec_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteSpecRequest, - dict, - ], -) -def test_delete_spec_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_spec(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_spec_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.delete_spec] = mock_rpc - - request = {} - client.delete_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_spec_rest_required_fields( - request_type=apihub_service.DeleteSpecRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - 
credentials=ga_credentials.AnonymousCredentials() - ).delete_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_spec(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_spec_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_spec._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_spec_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_delete_spec" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteSpecRequest.pb( - apihub_service.DeleteSpecRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteSpecRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_spec( - request, - 
metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_spec_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteSpecRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_spec(request) - - -def test_delete_spec_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_spec(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_spec_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_spec( - apihub_service.DeleteSpecRequest(), - name="name_value", - ) - - -def test_delete_spec_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetApiOperationRequest, - dict, - ], -) -def test_get_api_operation_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.ApiOperation( - name="name_value", - spec="spec_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ApiOperation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_api_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.ApiOperation) - assert response.name == "name_value" - assert response.spec == "spec_value" - - -def test_get_api_operation_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_api_operation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_api_operation - ] = mock_rpc - - request = {} - client.get_api_operation(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_api_operation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_api_operation_rest_required_fields( - request_type=apihub_service.GetApiOperationRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_api_operation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_api_operation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.ApiOperation() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.ApiOperation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_api_operation(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_api_operation_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_api_operation._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_api_operation_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_api_operation" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_api_operation" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetApiOperationRequest.pb( 
- apihub_service.GetApiOperationRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.ApiOperation.to_json( - common_fields.ApiOperation() - ) - - request = apihub_service.GetApiOperationRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.ApiOperation() - - client.get_api_operation( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_api_operation_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetApiOperationRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_api_operation(request) - - -def test_get_api_operation_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.ApiOperation() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ApiOperation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_api_operation(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/operations/*}" - % client.transport._host, - args[1], - ) - - -def test_get_api_operation_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_api_operation( - apihub_service.GetApiOperationRequest(), - name="name_value", - ) - - -def test_get_api_operation_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListApiOperationsRequest, - dict, - ], -) -def test_list_api_operations_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListApiOperationsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListApiOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_api_operations(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListApiOperationsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_api_operations_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.list_api_operations in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_api_operations - ] = mock_rpc - - request = {} - client.list_api_operations(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_api_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_api_operations_rest_required_fields( - request_type=apihub_service.ListApiOperationsRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_api_operations._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_api_operations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListApiOperationsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListApiOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_api_operations(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_api_operations_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_api_operations._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_api_operations_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, 
"transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_list_api_operations" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_api_operations" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListApiOperationsRequest.pb( - apihub_service.ListApiOperationsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListApiOperationsResponse.to_json( - apihub_service.ListApiOperationsResponse() - ) - - request = apihub_service.ListApiOperationsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListApiOperationsResponse() - - client.list_api_operations( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_api_operations_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListApiOperationsRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_api_operations(request) - - -def test_list_api_operations_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListApiOperationsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListApiOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_api_operations(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/operations" - % client.transport._host, - args[1], - ) - - -def test_list_api_operations_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_api_operations( - apihub_service.ListApiOperationsRequest(), - parent="parent_value", - ) - - -def test_list_api_operations_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - next_page_token="abc", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[], - next_page_token="def", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - ], - next_page_token="ghi", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.ListApiOperationsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - pager = client.list_api_operations(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.ApiOperation) for i in results) - - pages = list(client.list_api_operations(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetDefinitionRequest, - dict, - ], -) -def test_get_definition_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": 
"projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Definition( - name="name_value", - spec="spec_value", - type_=common_fields.Definition.Type.SCHEMA, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Definition.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_definition(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Definition) - assert response.name == "name_value" - assert response.spec == "spec_value" - assert response.type_ == common_fields.Definition.Type.SCHEMA - - -def test_get_definition_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_definition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[client._transport.get_definition] = mock_rpc - - request = {} - client.get_definition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_definition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_definition_rest_required_fields( - request_type=apihub_service.GetDefinitionRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_definition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_definition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Definition() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Definition.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_definition(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_definition_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_definition._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_definition_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, 
"post_get_definition" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_definition" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetDefinitionRequest.pb( - apihub_service.GetDefinitionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Definition.to_json( - common_fields.Definition() - ) - - request = apihub_service.GetDefinitionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Definition() - - client.get_definition( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_definition_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetDefinitionRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_definition(request) - - -def test_get_definition_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Definition() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Definition.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_definition(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/definitions/*}" - % client.transport._host, - args[1], - ) - - -def test_get_definition_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_definition( - apihub_service.GetDefinitionRequest(), - name="name_value", - ) - - -def test_get_definition_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateDeploymentRequest, - dict, - ], -) -def test_create_deployment_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["deployment"] = { - "name": "name_value", - "display_name": "display_name_value", - "description": "description_value", - "documentation": {"external_uri": "external_uri_value"}, - "deployment_type": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "resource_uri": "resource_uri_value", - "endpoints": ["endpoints_value1", "endpoints_value2"], - "api_versions": ["api_versions_value1", "api_versions_value2"], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "slo": {}, - "environment": {}, - "attributes": {}, - } - # The version of a generated dependency at test 
runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateDeploymentRequest.meta.fields["deployment"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["deployment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request 
which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["deployment"][field])): - del request_init["deployment"][field][i][subfield] - else: - del request_init["deployment"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_deployment(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] - - -def test_create_deployment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_deployment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_deployment - ] = mock_rpc - - request = {} - client.create_deployment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_deployment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_deployment_rest_required_fields( - request_type=apihub_service.CreateDeploymentRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_deployment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("deployment_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_deployment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_deployment_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_deployment._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("deploymentId",)) - & set( - ( - "parent", - "deployment", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_deployment_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, 
"transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_create_deployment" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_create_deployment" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.CreateDeploymentRequest.pb( - apihub_service.CreateDeploymentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Deployment.to_json( - common_fields.Deployment() - ) - - request = apihub_service.CreateDeploymentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Deployment() - - client.create_deployment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_deployment_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateDeploymentRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_deployment(request) - - -def test_create_deployment_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - deployment=common_fields.Deployment(name="name_value"), - deployment_id="deployment_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_deployment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deployments" - % client.transport._host, - args[1], - ) - - -def test_create_deployment_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_deployment( - apihub_service.CreateDeploymentRequest(), - parent="parent_value", - deployment=common_fields.Deployment(name="name_value"), - deployment_id="deployment_id_value", - ) - - -def test_create_deployment_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetDeploymentRequest, - dict, - ], -) -def test_get_deployment_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_deployment(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] - - -def test_get_deployment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_deployment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[client._transport.get_deployment] = mock_rpc - - request = {} - client.get_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_deployment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_deployment_rest_required_fields( - request_type=apihub_service.GetDeploymentRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_deployment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_deployment_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_deployment._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_deployment_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, 
"post_get_deployment" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_deployment" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetDeploymentRequest.pb( - apihub_service.GetDeploymentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Deployment.to_json( - common_fields.Deployment() - ) - - request = apihub_service.GetDeploymentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Deployment() - - client.get_deployment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_deployment_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetDeploymentRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_deployment(request) - - -def test_get_deployment_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_deployment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}" - % client.transport._host, - args[1], - ) - - -def test_get_deployment_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_deployment( - apihub_service.GetDeploymentRequest(), - name="name_value", - ) - - -def test_get_deployment_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListDeploymentsRequest, - dict, - ], -) -def test_list_deployments_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListDeploymentsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListDeploymentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_deployments(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDeploymentsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_deployments_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_deployments in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_deployments - ] = mock_rpc - - request = {} - client.list_deployments(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_deployments(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_deployments_rest_required_fields( - request_type=apihub_service.ListDeploymentsRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_deployments._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_deployments._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListDeploymentsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListDeploymentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_deployments(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_deployments_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_deployments._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_deployments_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as 
transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_list_deployments" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_deployments" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListDeploymentsRequest.pb( - apihub_service.ListDeploymentsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListDeploymentsResponse.to_json( - apihub_service.ListDeploymentsResponse() - ) - - request = apihub_service.ListDeploymentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListDeploymentsResponse() - - client.list_deployments( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_deployments_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListDeploymentsRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_deployments(request) - - -def test_list_deployments_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListDeploymentsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListDeploymentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_deployments(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deployments" - % client.transport._host, - args[1], - ) - - -def test_list_deployments_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_deployments( - apihub_service.ListDeploymentsRequest(), - parent="parent_value", - ) - - -def test_list_deployments_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - common_fields.Deployment(), - ], - next_page_token="abc", - ), - apihub_service.ListDeploymentsResponse( - deployments=[], - next_page_token="def", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - ], - next_page_token="ghi", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.ListDeploymentsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = 
response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_deployments(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.Deployment) for i in results) - - pages = list(client.list_deployments(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateDeploymentRequest, - dict, - ], -) -def test_update_deployment_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} - } - request_init["deployment"] = { - "name": "projects/sample1/locations/sample2/deployments/sample3", - "display_name": "display_name_value", - "description": "description_value", - "documentation": {"external_uri": "external_uri_value"}, - "deployment_type": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "resource_uri": "resource_uri_value", - "endpoints": ["endpoints_value1", "endpoints_value2"], - "api_versions": ["api_versions_value1", "api_versions_value2"], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "slo": {}, - "environment": {}, - "attributes": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateDeploymentRequest.meta.fields["deployment"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["deployment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the 
dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["deployment"][field])): - del request_init["deployment"][field][i][subfield] - else: - del request_init["deployment"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_deployment(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] - - -def test_update_deployment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_deployment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.update_deployment - ] = mock_rpc - - request = {} - client.update_deployment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_deployment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_deployment_rest_required_fields( - request_type=apihub_service.UpdateDeploymentRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_deployment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_deployment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_deployment_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_deployment._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "deployment", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_deployment_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_update_deployment" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_update_deployment" - ) as pre: - 
pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.UpdateDeploymentRequest.pb( - apihub_service.UpdateDeploymentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Deployment.to_json( - common_fields.Deployment() - ) - - request = apihub_service.UpdateDeploymentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Deployment() - - client.update_deployment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_deployment_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateDeploymentRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_deployment(request) - - -def test_update_deployment_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment() - - # get arguments that satisfy an http rule for this method - sample_request = { - "deployment": { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - deployment=common_fields.Deployment(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_deployment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{deployment.name=projects/*/locations/*/deployments/*}" - % client.transport._host, - args[1], - ) - - -def test_update_deployment_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_deployment( - apihub_service.UpdateDeploymentRequest(), - deployment=common_fields.Deployment(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_deployment_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteDeploymentRequest, - dict, - ], -) -def test_delete_deployment_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_deployment(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_deployment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_deployment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_deployment - ] = mock_rpc - - request = {} - client.delete_deployment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_deployment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_deployment_rest_required_fields( - request_type=apihub_service.DeleteDeploymentRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_deployment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_deployment_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_deployment._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_deployment_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_delete_deployment" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteDeploymentRequest.pb( - apihub_service.DeleteDeploymentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - 
req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteDeploymentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_deployment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_deployment_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteDeploymentRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_deployment(request) - - -def test_delete_deployment_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_deployment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_deployment_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_deployment( - apihub_service.DeleteDeploymentRequest(), - name="name_value", - ) - - -def test_delete_deployment_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateAttributeRequest, - dict, - ], -) -def test_create_attribute_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["attribute"] = { - "name": "name_value", - "display_name": "display_name_value", - "description": "description_value", - "definition_type": 1, - "scope": 1, - "data_type": 1, - "allowed_values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ], - "cardinality": 1172, - "mandatory": True, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateAttributeRequest.meta.fields["attribute"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["attribute"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["attribute"][field])): - del request_init["attribute"][field][i][subfield] - else: - del 
request_init["attribute"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_attribute(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Attribute) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert ( - response.definition_type - == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED - ) - assert response.scope == common_fields.Attribute.Scope.API - assert response.data_type == common_fields.Attribute.DataType.ENUM - assert response.cardinality == 1172 - assert response.mandatory is True - - -def test_create_attribute_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_attribute - ] = mock_rpc - - request = {} - client.create_attribute(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_attribute_rest_required_fields( - request_type=apihub_service.CreateAttributeRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_attribute._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_attribute._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("attribute_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_attribute(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_attribute_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_attribute._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("attributeId",)) - & set( - ( - "parent", - "attribute", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_attribute_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, 
"transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_create_attribute" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_create_attribute" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.CreateAttributeRequest.pb( - apihub_service.CreateAttributeRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Attribute.to_json( - common_fields.Attribute() - ) - - request = apihub_service.CreateAttributeRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Attribute() - - client.create_attribute( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_attribute_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateAttributeRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_attribute(request) - - -def test_create_attribute_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - attribute=common_fields.Attribute(name="name_value"), - attribute_id="attribute_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_attribute(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/attributes" % client.transport._host, - args[1], - ) - - -def test_create_attribute_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_attribute( - apihub_service.CreateAttributeRequest(), - parent="parent_value", - attribute=common_fields.Attribute(name="name_value"), - attribute_id="attribute_id_value", - ) - - -def test_create_attribute_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetAttributeRequest, - dict, - ], -) -def test_get_attribute_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_attribute(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Attribute) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert ( - response.definition_type - == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED - ) - assert response.scope == common_fields.Attribute.Scope.API - assert response.data_type == common_fields.Attribute.DataType.ENUM - assert response.cardinality == 1172 - assert response.mandatory is True - - -def test_get_attribute_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - 
mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_attribute] = mock_rpc - - request = {} - client.get_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_attribute_rest_required_fields( - request_type=apihub_service.GetAttributeRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_attribute._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_attribute._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_attribute(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_attribute_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_attribute._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_attribute_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_attribute" - ) 
as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_attribute" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetAttributeRequest.pb( - apihub_service.GetAttributeRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Attribute.to_json( - common_fields.Attribute() - ) - - request = apihub_service.GetAttributeRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Attribute() - - client.get_attribute( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_attribute_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetAttributeRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_attribute(request) - - -def test_get_attribute_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/attributes/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_attribute(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/attributes/*}" % client.transport._host, - args[1], - ) - - -def test_get_attribute_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_attribute( - apihub_service.GetAttributeRequest(), - name="name_value", - ) - - -def test_get_attribute_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateAttributeRequest, - dict, - ], -) -def test_update_attribute_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "attribute": {"name": "projects/sample1/locations/sample2/attributes/sample3"} - } - request_init["attribute"] = { - "name": "projects/sample1/locations/sample2/attributes/sample3", - "display_name": "display_name_value", - "description": "description_value", - "definition_type": 1, - "scope": 1, - "data_type": 1, - "allowed_values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ], - "cardinality": 1172, - "mandatory": True, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateAttributeRequest.meta.fields["attribute"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["attribute"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["attribute"][field])): - del request_init["attribute"][field][i][subfield] - else: - del 
request_init["attribute"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_attribute(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Attribute) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert ( - response.definition_type - == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED - ) - assert response.scope == common_fields.Attribute.Scope.API - assert response.data_type == common_fields.Attribute.DataType.ENUM - assert response.cardinality == 1172 - assert response.mandatory is True - - -def test_update_attribute_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.update_attribute - ] = mock_rpc - - request = {} - client.update_attribute(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_attribute_rest_required_fields( - request_type=apihub_service.UpdateAttributeRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_attribute._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_attribute._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_attribute(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_attribute_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_attribute._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "attribute", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_attribute_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_update_attribute" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_update_attribute" - ) as pre: - 
pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.UpdateAttributeRequest.pb( - apihub_service.UpdateAttributeRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Attribute.to_json( - common_fields.Attribute() - ) - - request = apihub_service.UpdateAttributeRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Attribute() - - client.update_attribute( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_attribute_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateAttributeRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "attribute": {"name": "projects/sample1/locations/sample2/attributes/sample3"} - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_attribute(request) - - -def test_update_attribute_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute() - - # get arguments that satisfy an http rule for this method - sample_request = { - "attribute": { - "name": "projects/sample1/locations/sample2/attributes/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - attribute=common_fields.Attribute(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_attribute(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{attribute.name=projects/*/locations/*/attributes/*}" - % client.transport._host, - args[1], - ) - - -def test_update_attribute_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_attribute( - apihub_service.UpdateAttributeRequest(), - attribute=common_fields.Attribute(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_attribute_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteAttributeRequest, - dict, - ], -) -def test_delete_attribute_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_attribute(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_attribute_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_attribute - ] = mock_rpc - - request = {} - client.delete_attribute(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_attribute_rest_required_fields( - request_type=apihub_service.DeleteAttributeRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_attribute._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_attribute._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_attribute(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_attribute_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_attribute._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_attribute_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_delete_attribute" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteAttributeRequest.pb( - apihub_service.DeleteAttributeRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - 
req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteAttributeRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_attribute( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_attribute_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteAttributeRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_attribute(request) - - -def test_delete_attribute_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/attributes/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_attribute(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/attributes/*}" % client.transport._host, - args[1], - ) - - -def test_delete_attribute_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_attribute( - apihub_service.DeleteAttributeRequest(), - name="name_value", - ) - - -def test_delete_attribute_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListAttributesRequest, - dict, - ], -) -def test_list_attributes_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListAttributesResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListAttributesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_attributes(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAttributesPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_attributes_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_attributes in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_attributes] = mock_rpc - - request = {} - client.list_attributes(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_attributes(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_attributes_rest_required_fields( - request_type=apihub_service.ListAttributesRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_attributes._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_attributes._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListAttributesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListAttributesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_attributes(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_attributes_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_attributes._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_attributes_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as 
transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_list_attributes" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_attributes" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListAttributesRequest.pb( - apihub_service.ListAttributesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListAttributesResponse.to_json( - apihub_service.ListAttributesResponse() - ) - - request = apihub_service.ListAttributesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListAttributesResponse() - - client.list_attributes( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_attributes_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListAttributesRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_attributes(request) - - -def test_list_attributes_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListAttributesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListAttributesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_attributes(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/attributes" % client.transport._host, - args[1], - ) - - -def test_list_attributes_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_attributes( - apihub_service.ListAttributesRequest(), - parent="parent_value", - ) - - -def test_list_attributes_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - common_fields.Attribute(), - ], - next_page_token="abc", - ), - apihub_service.ListAttributesResponse( - attributes=[], - next_page_token="def", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - ], - next_page_token="ghi", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.ListAttributesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_attributes(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.Attribute) for i in results) - - pages = list(client.list_attributes(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert 
page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.SearchResourcesRequest, - dict, - ], -) -def test_search_resources_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"location": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.SearchResourcesResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.SearchResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.search_resources(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.SearchResourcesPager) - assert response.next_page_token == "next_page_token_value" - - -def test_search_resources_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.search_resources in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.search_resources - ] = mock_rpc - - request = {} - client.search_resources(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.search_resources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_search_resources_rest_required_fields( - request_type=apihub_service.SearchResourcesRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["location"] = "" - request_init["query"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).search_resources._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["location"] = "location_value" - jsonified_request["query"] = "query_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).search_resources._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "location" in jsonified_request - assert jsonified_request["location"] == "location_value" - assert "query" in jsonified_request - assert jsonified_request["query"] == "query_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.SearchResourcesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.SearchResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.search_resources(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_search_resources_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.search_resources._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "location", - "query", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_resources_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, 
"transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_search_resources" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_search_resources" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.SearchResourcesRequest.pb( - apihub_service.SearchResourcesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.SearchResourcesResponse.to_json( - apihub_service.SearchResourcesResponse() - ) - - request = apihub_service.SearchResourcesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.SearchResourcesResponse() - - client.search_resources( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_search_resources_rest_bad_request( - transport: str = "rest", request_type=apihub_service.SearchResourcesRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"location": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.search_resources(request) - - -def test_search_resources_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.SearchResourcesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"location": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - location="location_value", - query="query_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.SearchResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.search_resources(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{location=projects/*/locations/*}:searchResources" - % client.transport._host, - args[1], - ) - - -def test_search_resources_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.search_resources( - apihub_service.SearchResourcesRequest(), - location="location_value", - query="query_value", - ) - - -def test_search_resources_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - next_page_token="abc", - ), - apihub_service.SearchResourcesResponse( - search_results=[], - next_page_token="def", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - ], - next_page_token="ghi", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.SearchResourcesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"location": "projects/sample1/locations/sample2"} - - pager = client.search_resources(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, apihub_service.SearchResult) for i in results) - - pages = list(client.search_resources(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateExternalApiRequest, - dict, - ], -) -def test_create_external_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["external_api"] = { - "name": 
"name_value", - "display_name": "display_name_value", - "description": "description_value", - "endpoints": ["endpoints_value1", "endpoints_value2"], - "paths": ["paths_value1", "paths_value2"], - "documentation": {"external_uri": "external_uri_value"}, - "attributes": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateExternalApiRequest.meta.fields["external_api"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["external_api"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - 
result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["external_api"][field])): - del request_init["external_api"][field][i][subfield] - else: - del request_init["external_api"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_external_api(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.ExternalApi) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.endpoints == ["endpoints_value"] - assert response.paths == ["paths_value"] - - -def test_create_external_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.create_external_api in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_external_api - ] = mock_rpc - - request = {} - client.create_external_api(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_external_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_external_api_rest_required_fields( - request_type=apihub_service.CreateExternalApiRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_external_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_external_api._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("external_api_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_external_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_external_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_external_api._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("externalApiId",)) - & set( - ( - "parent", - "externalApi", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_external_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - 
path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_create_external_api" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_create_external_api" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.CreateExternalApiRequest.pb( - apihub_service.CreateExternalApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.ExternalApi.to_json( - common_fields.ExternalApi() - ) - - request = apihub_service.CreateExternalApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.ExternalApi() - - client.create_external_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_external_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateExternalApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_external_api(request) - - -def test_create_external_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - external_api=common_fields.ExternalApi(name="name_value"), - external_api_id="external_api_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_external_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/externalApis" - % client.transport._host, - args[1], - ) - - -def test_create_external_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_external_api( - apihub_service.CreateExternalApiRequest(), - parent="parent_value", - external_api=common_fields.ExternalApi(name="name_value"), - external_api_id="external_api_id_value", - ) - - -def test_create_external_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetExternalApiRequest, - dict, - ], -) -def test_get_external_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_external_api(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.ExternalApi) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.endpoints == ["endpoints_value"] - assert response.paths == ["paths_value"] - - -def test_get_external_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_external_api in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_external_api - ] = mock_rpc - - request = {} - client.get_external_api(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_external_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_external_api_rest_required_fields( - request_type=apihub_service.GetExternalApiRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_external_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_external_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_external_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_external_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_external_api._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_external_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_external_api" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_external_api" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetExternalApiRequest.pb( - 
apihub_service.GetExternalApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.ExternalApi.to_json( - common_fields.ExternalApi() - ) - - request = apihub_service.GetExternalApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.ExternalApi() - - client.get_external_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_external_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetExternalApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_external_api(request) - - -def test_get_external_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.ExternalApi() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/externalApis/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_external_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/externalApis/*}" - % client.transport._host, - args[1], - ) - - -def test_get_external_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_external_api( - apihub_service.GetExternalApiRequest(), - name="name_value", - ) - - -def test_get_external_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateExternalApiRequest, - dict, - ], -) -def test_update_external_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "external_api": { - "name": "projects/sample1/locations/sample2/externalApis/sample3" - } - } - request_init["external_api"] = { - "name": "projects/sample1/locations/sample2/externalApis/sample3", - "display_name": "display_name_value", - "description": "description_value", - "endpoints": ["endpoints_value1", "endpoints_value2"], - "paths": ["paths_value1", "paths_value2"], - "documentation": {"external_uri": "external_uri_value"}, - "attributes": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateExternalApiRequest.meta.fields["external_api"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["external_api"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["external_api"][field])): - del request_init["external_api"][field][i][subfield] - else: - del 
request_init["external_api"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_external_api(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.ExternalApi) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.endpoints == ["endpoints_value"] - assert response.paths == ["paths_value"] - - -def test_update_external_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.update_external_api in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - 
"foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.update_external_api - ] = mock_rpc - - request = {} - client.update_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_external_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_external_api_rest_required_fields( - request_type=apihub_service.UpdateExternalApiRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_external_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_external_api._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_external_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_external_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_external_api._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "externalApi", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_external_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - 
path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_update_external_api" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_update_external_api" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.UpdateExternalApiRequest.pb( - apihub_service.UpdateExternalApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.ExternalApi.to_json( - common_fields.ExternalApi() - ) - - request = apihub_service.UpdateExternalApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.ExternalApi() - - client.update_external_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_external_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateExternalApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "external_api": { - "name": "projects/sample1/locations/sample2/externalApis/sample3" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_external_api(request) - - -def test_update_external_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi() - - # get arguments that satisfy an http rule for this method - sample_request = { - "external_api": { - "name": "projects/sample1/locations/sample2/externalApis/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - external_api=common_fields.ExternalApi(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_external_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{external_api.name=projects/*/locations/*/externalApis/*}" - % client.transport._host, - args[1], - ) - - -def test_update_external_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_external_api( - apihub_service.UpdateExternalApiRequest(), - external_api=common_fields.ExternalApi(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_external_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteExternalApiRequest, - dict, - ], -) -def test_delete_external_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_external_api(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_external_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.delete_external_api in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_external_api - ] = mock_rpc - - request = {} - client.delete_external_api(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_external_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_external_api_rest_required_fields( - request_type=apihub_service.DeleteExternalApiRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_external_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_external_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_external_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_external_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_external_api._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_external_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_delete_external_api" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteExternalApiRequest.pb( - apihub_service.DeleteExternalApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - 
req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteExternalApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_external_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_external_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteExternalApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_external_api(request) - - -def test_delete_external_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/externalApis/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_external_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/externalApis/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_external_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_external_api( - apihub_service.DeleteExternalApiRequest(), - name="name_value", - ) - - -def test_delete_external_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListExternalApisRequest, - dict, - ], -) -def test_list_external_apis_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListExternalApisResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListExternalApisResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_external_apis(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListExternalApisPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_external_apis_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.list_external_apis in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_external_apis - ] = mock_rpc - - request = {} - client.list_external_apis(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_external_apis(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_external_apis_rest_required_fields( - request_type=apihub_service.ListExternalApisRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_external_apis._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_external_apis._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListExternalApisResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListExternalApisResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_external_apis(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_external_apis_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_external_apis._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_external_apis_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as 
transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_list_external_apis" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_external_apis" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListExternalApisRequest.pb( - apihub_service.ListExternalApisRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListExternalApisResponse.to_json( - apihub_service.ListExternalApisResponse() - ) - - request = apihub_service.ListExternalApisRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListExternalApisResponse() - - client.list_external_apis( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_external_apis_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListExternalApisRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_external_apis(request) - - -def test_list_external_apis_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListExternalApisResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListExternalApisResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_external_apis(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/externalApis" - % client.transport._host, - args[1], - ) - - -def test_list_external_apis_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_external_apis( - apihub_service.ListExternalApisRequest(), - parent="parent_value", - ) - - -def test_list_external_apis_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - next_page_token="abc", - ), - apihub_service.ListExternalApisResponse( - external_apis=[], - next_page_token="def", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - ], - next_page_token="ghi", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.ListExternalApisResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - 
return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_external_apis(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.ExternalApi) for i in results) - - pages = list(client.list_external_apis(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ApiHubGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ApiHubGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ApiHubGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ApiHubClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ApiHubClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. 
- transport = transports.ApiHubGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ApiHubGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ApiHubClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ApiHubGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ApiHubGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubGrpcTransport, - transports.ApiHubGrpcAsyncIOTransport, - transports.ApiHubRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = ApiHubClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ApiHubGrpcTransport, - ) - - -def test_api_hub_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ApiHubTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_api_hub_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.ApiHubTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "create_api", - "get_api", - "list_apis", - "update_api", - "delete_api", - "create_version", - "get_version", - "list_versions", - "update_version", - "delete_version", - "create_spec", - "get_spec", - "get_spec_contents", - "list_specs", - "update_spec", - "delete_spec", - "get_api_operation", - "list_api_operations", - "get_definition", - "create_deployment", - "get_deployment", - "list_deployments", - "update_deployment", - "delete_deployment", - "create_attribute", - "get_attribute", - "update_attribute", - "delete_attribute", - "list_attributes", - "search_resources", - "create_external_api", - "get_external_api", - "update_external_api", - "delete_external_api", - "list_external_apis", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 
"kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_api_hub_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ApiHubTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_api_hub_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ApiHubTransport() - adc.assert_called_once() - - -def test_api_hub_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ApiHubClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubGrpcTransport, - transports.ApiHubGrpcAsyncIOTransport, - ], -) -def test_api_hub_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubGrpcTransport, - transports.ApiHubGrpcAsyncIOTransport, - transports.ApiHubRestTransport, - ], -) -def test_api_hub_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ApiHubGrpcTransport, grpc_helpers), - (transports.ApiHubGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_api_hub_transport_create_channel(transport_class, grpc_helpers): - # 
If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [transports.ApiHubGrpcTransport, transports.ApiHubGrpcAsyncIOTransport], -) -def test_api_hub_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_api_hub_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.ApiHubRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_api_hub_host_no_port(transport_name): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_api_hub_host_with_port(transport_name): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com:8000" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def 
test_api_hub_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = ApiHubClient( - credentials=creds1, - transport=transport_name, - ) - client2 = ApiHubClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_api._session - session2 = client2.transport.create_api._session - assert session1 != session2 - session1 = client1.transport.get_api._session - session2 = client2.transport.get_api._session - assert session1 != session2 - session1 = client1.transport.list_apis._session - session2 = client2.transport.list_apis._session - assert session1 != session2 - session1 = client1.transport.update_api._session - session2 = client2.transport.update_api._session - assert session1 != session2 - session1 = client1.transport.delete_api._session - session2 = client2.transport.delete_api._session - assert session1 != session2 - session1 = client1.transport.create_version._session - session2 = client2.transport.create_version._session - assert session1 != session2 - session1 = client1.transport.get_version._session - session2 = client2.transport.get_version._session - assert session1 != session2 - session1 = client1.transport.list_versions._session - session2 = client2.transport.list_versions._session - assert session1 != session2 - session1 = client1.transport.update_version._session - session2 = client2.transport.update_version._session - assert session1 != session2 - session1 = client1.transport.delete_version._session - session2 = client2.transport.delete_version._session - assert session1 != session2 - session1 = client1.transport.create_spec._session - session2 = client2.transport.create_spec._session - assert session1 != session2 - session1 = client1.transport.get_spec._session - session2 = client2.transport.get_spec._session - assert session1 != session2 - session1 = client1.transport.get_spec_contents._session - session2 = 
client2.transport.get_spec_contents._session - assert session1 != session2 - session1 = client1.transport.list_specs._session - session2 = client2.transport.list_specs._session - assert session1 != session2 - session1 = client1.transport.update_spec._session - session2 = client2.transport.update_spec._session - assert session1 != session2 - session1 = client1.transport.delete_spec._session - session2 = client2.transport.delete_spec._session - assert session1 != session2 - session1 = client1.transport.get_api_operation._session - session2 = client2.transport.get_api_operation._session - assert session1 != session2 - session1 = client1.transport.list_api_operations._session - session2 = client2.transport.list_api_operations._session - assert session1 != session2 - session1 = client1.transport.get_definition._session - session2 = client2.transport.get_definition._session - assert session1 != session2 - session1 = client1.transport.create_deployment._session - session2 = client2.transport.create_deployment._session - assert session1 != session2 - session1 = client1.transport.get_deployment._session - session2 = client2.transport.get_deployment._session - assert session1 != session2 - session1 = client1.transport.list_deployments._session - session2 = client2.transport.list_deployments._session - assert session1 != session2 - session1 = client1.transport.update_deployment._session - session2 = client2.transport.update_deployment._session - assert session1 != session2 - session1 = client1.transport.delete_deployment._session - session2 = client2.transport.delete_deployment._session - assert session1 != session2 - session1 = client1.transport.create_attribute._session - session2 = client2.transport.create_attribute._session - assert session1 != session2 - session1 = client1.transport.get_attribute._session - session2 = client2.transport.get_attribute._session - assert session1 != session2 - session1 = client1.transport.update_attribute._session - session2 = 
client2.transport.update_attribute._session - assert session1 != session2 - session1 = client1.transport.delete_attribute._session - session2 = client2.transport.delete_attribute._session - assert session1 != session2 - session1 = client1.transport.list_attributes._session - session2 = client2.transport.list_attributes._session - assert session1 != session2 - session1 = client1.transport.search_resources._session - session2 = client2.transport.search_resources._session - assert session1 != session2 - session1 = client1.transport.create_external_api._session - session2 = client2.transport.create_external_api._session - assert session1 != session2 - session1 = client1.transport.get_external_api._session - session2 = client2.transport.get_external_api._session - assert session1 != session2 - session1 = client1.transport.update_external_api._session - session2 = client2.transport.update_external_api._session - assert session1 != session2 - session1 = client1.transport.delete_external_api._session - session2 = client2.transport.delete_external_api._session - assert session1 != session2 - session1 = client1.transport.list_external_apis._session - session2 = client2.transport.list_external_apis._session - assert session1 != session2 - - -def test_api_hub_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ApiHubGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_api_hub_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.ApiHubGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [transports.ApiHubGrpcTransport, transports.ApiHubGrpcAsyncIOTransport], -) -def test_api_hub_transport_channel_mtls_with_client_cert_source(transport_class): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio 
transport constructor. -@pytest.mark.parametrize( - "transport_class", - [transports.ApiHubGrpcTransport, transports.ApiHubGrpcAsyncIOTransport], -) -def test_api_hub_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_api_path(): - project = "squid" - location = "clam" - api = "whelk" - expected = "projects/{project}/locations/{location}/apis/{api}".format( - project=project, - location=location, - api=api, - ) - actual = ApiHubClient.api_path(project, location, api) - assert expected == actual - - -def test_parse_api_path(): - expected = { - "project": "octopus", - "location": "oyster", - "api": "nudibranch", - } - path = ApiHubClient.api_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubClient.parse_api_path(path) - assert expected == actual - - -def test_api_operation_path(): - project = "cuttlefish" - location = "mussel" - api = "winkle" - version = "nautilus" - operation = "scallop" - expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}".format( - project=project, - location=location, - api=api, - version=version, - operation=operation, - ) - actual = ApiHubClient.api_operation_path(project, location, api, version, operation) - assert expected == actual - - -def test_parse_api_operation_path(): - expected = { - "project": "abalone", - "location": "squid", - "api": "clam", - "version": "whelk", - "operation": "octopus", - } - path = ApiHubClient.api_operation_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_api_operation_path(path) - assert expected == actual - - -def test_attribute_path(): - project = "oyster" - location = "nudibranch" - attribute = "cuttlefish" - expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( - project=project, - location=location, - attribute=attribute, - ) - actual = ApiHubClient.attribute_path(project, location, attribute) - assert expected == actual - - -def test_parse_attribute_path(): - expected = { - "project": "mussel", - "location": "winkle", - "attribute": "nautilus", - } - path = ApiHubClient.attribute_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubClient.parse_attribute_path(path) - assert expected == actual - - -def test_definition_path(): - project = "scallop" - location = "abalone" - api = "squid" - version = "clam" - definition = "whelk" - expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/definitions/{definition}".format( - project=project, - location=location, - api=api, - version=version, - definition=definition, - ) - actual = ApiHubClient.definition_path(project, location, api, version, definition) - assert expected == actual - - -def test_parse_definition_path(): - expected = { - "project": "octopus", - "location": "oyster", - "api": "nudibranch", - "version": "cuttlefish", - "definition": "mussel", - } - path = ApiHubClient.definition_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_definition_path(path) - assert expected == actual - - -def test_deployment_path(): - project = "winkle" - location = "nautilus" - deployment = "scallop" - expected = ( - "projects/{project}/locations/{location}/deployments/{deployment}".format( - project=project, - location=location, - deployment=deployment, - ) - ) - actual = ApiHubClient.deployment_path(project, location, deployment) - assert expected == actual - - -def test_parse_deployment_path(): - expected = { - "project": "abalone", - "location": "squid", - "deployment": "clam", - } - path = ApiHubClient.deployment_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubClient.parse_deployment_path(path) - assert expected == actual - - -def test_external_api_path(): - project = "whelk" - location = "octopus" - external_api = "oyster" - expected = ( - "projects/{project}/locations/{location}/externalApis/{external_api}".format( - project=project, - location=location, - external_api=external_api, - ) - ) - actual = ApiHubClient.external_api_path(project, location, external_api) - assert expected == actual - - -def test_parse_external_api_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "external_api": "mussel", - } - path = ApiHubClient.external_api_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_external_api_path(path) - assert expected == actual - - -def test_spec_path(): - project = "winkle" - location = "nautilus" - api = "scallop" - version = "abalone" - spec = "squid" - expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}".format( - project=project, - location=location, - api=api, - version=version, - spec=spec, - ) - actual = ApiHubClient.spec_path(project, location, api, version, spec) - assert expected == actual - - -def test_parse_spec_path(): - expected = { - "project": "clam", - "location": "whelk", - "api": "octopus", - "version": "oyster", - "spec": "nudibranch", - } - path = ApiHubClient.spec_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubClient.parse_spec_path(path) - assert expected == actual - - -def test_version_path(): - project = "cuttlefish" - location = "mussel" - api = "winkle" - version = "nautilus" - expected = ( - "projects/{project}/locations/{location}/apis/{api}/versions/{version}".format( - project=project, - location=location, - api=api, - version=version, - ) - ) - actual = ApiHubClient.version_path(project, location, api, version) - assert expected == actual - - -def test_parse_version_path(): - expected = { - "project": "scallop", - "location": "abalone", - "api": "squid", - "version": "clam", - } - path = ApiHubClient.version_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_version_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = ApiHubClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = ApiHubClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = ApiHubClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = ApiHubClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = ApiHubClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = ApiHubClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format( - project=project, - ) - actual = ApiHubClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = ApiHubClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = ApiHubClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = ApiHubClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.ApiHubTransport, "_prep_wrapped_messages" - ) as prep: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.ApiHubTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = ApiHubClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) + client = ApiHubClient(transport=transport) + assert client.transport is transport @pytest.mark.parametrize( - "request_type", + "transport_class", [ - locations_pb2.GetLocationRequest, - dict, + transports.ApiHubRestTransport, ], ) -def test_get_location_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() @pytest.mark.parametrize( - "request_type", + "transport_name", [ - locations_pb2.ListLocationsRequest, - dict, + "rest", ], ) -def test_list_locations_rest(request_type): - client = ApiHubClient( +def test_transport_kind(transport_name): + transport = ApiHubClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + assert transport.kind == transport_name - response = client.list_locations(request) - # Establish that the response is the type that we expect. 
- assert isinstance(response, locations_pb2.ListLocationsResponse) +def test_api_hub_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ApiHubTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_api_hub_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ApiHubTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_api", + "get_api", + "list_apis", + "update_api", + "delete_api", + "create_version", + "get_version", + "list_versions", + "update_version", + "delete_version", + "create_spec", + "get_spec", + "get_spec_contents", + "list_specs", + "update_spec", + "delete_spec", + "get_api_operation", + "list_api_operations", + "get_definition", + "create_deployment", + "get_deployment", + "list_deployments", + "update_deployment", + "delete_deployment", + "create_attribute", + "get_attribute", + "update_attribute", + "delete_attribute", + "list_attributes", + "search_resources", + "create_external_api", + "get_external_api", + "update_external_api", + "delete_external_api", + "list_external_apis", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - + with pytest.raises(NotImplementedError): + transport.close() -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value +def test_api_hub_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) - response = client.cancel_operation(request) - # Establish that the response is the type that we expect. - assert response is None +def test_api_hub_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubTransport() + adc.assert_called_once() -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_api_hub_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ApiHubClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) +def test_api_hub_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ApiHubRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) @pytest.mark.parametrize( - "request_type", + "transport_name", [ - operations_pb2.DeleteOperationRequest, - dict, + "rest", ], ) -def test_delete_operation_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): +def test_api_hub_host_no_port(transport_name): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - @pytest.mark.parametrize( - "request_type", + "transport_name", [ - operations_pb2.GetOperationRequest, - dict, + "rest", ], ) -def test_get_operation_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): +def test_api_hub_host_with_port(transport_name): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" ) - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - @pytest.mark.parametrize( - "request_type", + "transport_name", [ - operations_pb2.ListOperationsRequest, - dict, + "rest", ], ) -def test_list_operations_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", +def test_api_hub_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ApiHubClient( + credentials=creds1, + transport=transport_name, ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) + client2 = ApiHubClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_api._session + session2 = client2.transport.create_api._session + assert session1 != session2 + session1 = client1.transport.get_api._session + session2 = client2.transport.get_api._session + assert session1 != session2 + session1 = client1.transport.list_apis._session + session2 = client2.transport.list_apis._session + assert session1 != session2 + session1 = client1.transport.update_api._session + session2 = client2.transport.update_api._session + assert session1 != session2 + session1 = client1.transport.delete_api._session + session2 = client2.transport.delete_api._session + assert session1 != session2 + session1 = client1.transport.create_version._session + session2 = client2.transport.create_version._session + assert session1 != session2 + session1 = client1.transport.get_version._session + session2 = client2.transport.get_version._session + assert session1 != session2 + session1 = client1.transport.list_versions._session + session2 = client2.transport.list_versions._session + assert session1 != session2 + session1 = client1.transport.update_version._session + session2 = client2.transport.update_version._session + assert session1 != session2 + session1 = client1.transport.delete_version._session + session2 = client2.transport.delete_version._session + assert session1 != session2 + session1 = client1.transport.create_spec._session + session2 = client2.transport.create_spec._session + assert session1 != session2 + session1 = client1.transport.get_spec._session + session2 = client2.transport.get_spec._session + assert session1 != session2 + session1 = client1.transport.get_spec_contents._session + session2 = client2.transport.get_spec_contents._session + assert session1 != session2 + session1 = client1.transport.list_specs._session + session2 = 
client2.transport.list_specs._session + assert session1 != session2 + session1 = client1.transport.update_spec._session + session2 = client2.transport.update_spec._session + assert session1 != session2 + session1 = client1.transport.delete_spec._session + session2 = client2.transport.delete_spec._session + assert session1 != session2 + session1 = client1.transport.get_api_operation._session + session2 = client2.transport.get_api_operation._session + assert session1 != session2 + session1 = client1.transport.list_api_operations._session + session2 = client2.transport.list_api_operations._session + assert session1 != session2 + session1 = client1.transport.get_definition._session + session2 = client2.transport.get_definition._session + assert session1 != session2 + session1 = client1.transport.create_deployment._session + session2 = client2.transport.create_deployment._session + assert session1 != session2 + session1 = client1.transport.get_deployment._session + session2 = client2.transport.get_deployment._session + assert session1 != session2 + session1 = client1.transport.list_deployments._session + session2 = client2.transport.list_deployments._session + assert session1 != session2 + session1 = client1.transport.update_deployment._session + session2 = client2.transport.update_deployment._session + assert session1 != session2 + session1 = client1.transport.delete_deployment._session + session2 = client2.transport.delete_deployment._session + assert session1 != session2 + session1 = client1.transport.create_attribute._session + session2 = client2.transport.create_attribute._session + assert session1 != session2 + session1 = client1.transport.get_attribute._session + session2 = client2.transport.get_attribute._session + assert session1 != session2 + session1 = client1.transport.update_attribute._session + session2 = client2.transport.update_attribute._session + assert session1 != session2 + session1 = client1.transport.delete_attribute._session + session2 = 
client2.transport.delete_attribute._session + assert session1 != session2 + session1 = client1.transport.list_attributes._session + session2 = client2.transport.list_attributes._session + assert session1 != session2 + session1 = client1.transport.search_resources._session + session2 = client2.transport.search_resources._session + assert session1 != session2 + session1 = client1.transport.create_external_api._session + session2 = client2.transport.create_external_api._session + assert session1 != session2 + session1 = client1.transport.get_external_api._session + session2 = client2.transport.get_external_api._session + assert session1 != session2 + session1 = client1.transport.update_external_api._session + session2 = client2.transport.update_external_api._session + assert session1 != session2 + session1 = client1.transport.delete_external_api._session + session2 = client2.transport.delete_external_api._session + assert session1 != session2 + session1 = client1.transport.list_external_apis._session + session2 = client2.transport.list_external_apis._session + assert session1 != session2 -def test_delete_operation(transport: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_api_path(): + project = "squid" + location = "clam" + api = "whelk" + expected = "projects/{project}/locations/{location}/apis/{api}".format( + project=project, + location=location, + api=api, ) + actual = ApiHubClient.api_path(project, location, api) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_api_path(): + expected = { + "project": "octopus", + "location": "oyster", + "api": "nudibranch", + } + path = ApiHubClient.api_path(**expected) - # Establish that the response is the type that we expect. - assert response is None + # Check that the path construction is reversible. + actual = ApiHubClient.parse_api_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_api_operation_path(): + project = "cuttlefish" + location = "mussel" + api = "winkle" + version = "nautilus" + operation = "scallop" + expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}".format( + project=project, + location=location, + api=api, + version=version, + operation=operation, ) + actual = ApiHubClient.api_operation_path(project, location, api, version, operation) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_api_operation_path(): + expected = { + "project": "abalone", + "location": "squid", + "api": "clam", + "version": "whelk", + "operation": "octopus", + } + path = ApiHubClient.api_operation_path(**expected) - # Establish that the response is the type that we expect. - assert response is None + # Check that the path construction is reversible. + actual = ApiHubClient.parse_api_operation_path(path) + assert expected == actual -def test_delete_operation_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_attribute_path(): + project = "oyster" + location = "nudibranch" + attribute = "cuttlefish" + expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( + project=project, + location=location, + attribute=attribute, ) + actual = ApiHubClient.attribute_path(project, location, attribute) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_attribute_path(): + expected = { + "project": "mussel", + "location": "winkle", + "attribute": "nautilus", + } + path = ApiHubClient.attribute_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. 
+ actual = ApiHubClient.parse_attribute_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_definition_path(): + project = "scallop" + location = "abalone" + api = "squid" + version = "clam" + definition = "whelk" + expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/definitions/{definition}".format( + project=project, + location=location, + api=api, + version=version, + definition=definition, ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + actual = ApiHubClient.definition_path(project, location, api, version, definition) + assert expected == actual -def test_delete_operation_from_dict(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None +def test_parse_definition_path(): + expected = { + "project": "octopus", + "location": "oyster", + "api": "nudibranch", + "version": "cuttlefish", + "definition": "mussel", + } + path = ApiHubClient.definition_path(**expected) - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + # Check that the path construction is reversible. + actual = ApiHubClient.parse_definition_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } +def test_deployment_path(): + project = "winkle" + location = "nautilus" + deployment = "scallop" + expected = ( + "projects/{project}/locations/{location}/deployments/{deployment}".format( + project=project, + location=location, + deployment=deployment, ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + actual = ApiHubClient.deployment_path(project, location, deployment) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_deployment_path(): + expected = { + "project": "abalone", + "location": "squid", + "deployment": "clam", + } + path = ApiHubClient.deployment_path(**expected) - # Establish that the response is the type that we expect. - assert response is None + # Check that the path construction is reversible. + actual = ApiHubClient.parse_deployment_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_external_api_path(): + project = "whelk" + location = "octopus" + external_api = "oyster" + expected = ( + "projects/{project}/locations/{location}/externalApis/{external_api}".format( + project=project, + location=location, + external_api=external_api, + ) ) + actual = ApiHubClient.external_api_path(project, location, external_api) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_external_api_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "external_api": "mussel", + } + path = ApiHubClient.external_api_path(**expected) - # Establish that the response is the type that we expect. - assert response is None + # Check that the path construction is reversible. + actual = ApiHubClient.parse_external_api_path(path) + assert expected == actual -def test_cancel_operation_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_spec_path(): + project = "winkle" + location = "nautilus" + api = "scallop" + version = "abalone" + spec = "squid" + expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}".format( + project=project, + location=location, + api=api, + version=version, + spec=spec, ) + actual = ApiHubClient.spec_path(project, location, api, version, spec) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_spec_path(): + expected = { + "project": "clam", + "location": "whelk", + "api": "octopus", + "version": "oyster", + "spec": "nudibranch", + } + path = ApiHubClient.spec_path(**expected) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubClient.parse_spec_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_version_path(): + project = "cuttlefish" + location = "mussel" + api = "winkle" + version = "nautilus" + expected = ( + "projects/{project}/locations/{location}/apis/{api}/versions/{version}".format( + project=project, + location=location, + api=api, + version=version, + ) ) + actual = ApiHubClient.version_path(project, location, api, version) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_version_path(): + expected = { + "project": "scallop", + "location": "abalone", + "api": "squid", + "version": "clam", + } + path = ApiHubClient.version_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. 
+ actual = ApiHubClient.parse_version_path(path) + assert expected == actual -def test_cancel_operation_from_dict(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None + actual = ApiHubClient.common_billing_account_path(billing_account) + assert expected == actual - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = ApiHubClient.common_billing_account_path(**expected) -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + # Check that the path construction is reversible. 
+ actual = ApiHubClient.parse_common_billing_account_path(path) + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, ) + actual = ApiHubClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = ApiHubClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = ApiHubClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = ApiHubClient.common_organization_path(organization) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = ApiHubClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = ApiHubClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, ) + actual = ApiHubClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = ApiHubClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = ApiHubClient.common_location_path(project, location) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.GetOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = ApiHubClient.common_location_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() - response = client.get_operation( - request={ - "name": "locations", - } + with mock.patch.object( + transports.ApiHubTransport, "_prep_wrapped_messages" + ) as prep: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() - + prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } + with mock.patch.object( + transports.ApiHubTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ApiHubClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = ApiHubAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.ListLocationsRequest() - request.name = "locations" + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert response is None -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = ApiHubAsyncClient( +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_list_locations_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + response = client.delete_operation(request) + # Establish that the response is the type that we expect. + assert response is None -def test_get_location(transport: str = "grpc"): + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = ApiHubAsyncClient( +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_get_location_field_headers(): - client = ApiHubClient(credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + response = client.get_operation(request) + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = ApiHubAsyncClient(credentials=ga_credentials.AnonymousCredentials()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = locations_pb2.Location() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -31071,7 +14654,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = ApiHubClient( @@ -31088,8 +14670,7 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (ApiHubClient, transports.ApiHubGrpcTransport), - (ApiHubAsyncClient, transports.ApiHubGrpcAsyncIOTransport), + (ApiHubClient, transports.ApiHubRestTransport), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py index bf0a3da3c9e3..525149783ded 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py @@ -48,7 +48,6 @@ from requests.sessions import Session from google.cloud.apihub_v1.services.api_hub_dependencies import ( - ApiHubDependenciesAsyncClient, ApiHubDependenciesClient, pagers, transports, @@ -222,11 +221,6 @@ def test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubDependenciesClient), ) -@mock.patch.object( - ApiHubDependenciesAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubDependenciesAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -315,7 +309,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubDependenciesClient, transports.ApiHubDependenciesGrpcTransport, "grpc"), (ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport, 
"rest"), ], ) @@ -395,8 +388,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (ApiHubDependenciesClient, "grpc"), - (ApiHubDependenciesAsyncClient, "grpc_asyncio"), (ApiHubDependenciesClient, "rest"), ], ) @@ -423,8 +414,6 @@ def test_api_hub_dependencies_client_from_service_account_info( @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.ApiHubDependenciesGrpcTransport, "grpc"), - (transports.ApiHubDependenciesGrpcAsyncIOTransport, "grpc_asyncio"), (transports.ApiHubDependenciesRestTransport, "rest"), ], ) @@ -449,8 +438,6 @@ def test_api_hub_dependencies_client_service_account_always_use_jwt( @pytest.mark.parametrize( "client_class,transport_name", [ - (ApiHubDependenciesClient, "grpc"), - (ApiHubDependenciesAsyncClient, "grpc_asyncio"), (ApiHubDependenciesClient, "rest"), ], ) @@ -484,24 +471,17 @@ def test_api_hub_dependencies_client_from_service_account_file( def test_api_hub_dependencies_client_get_transport_class(): transport = ApiHubDependenciesClient.get_transport_class() available_transports = [ - transports.ApiHubDependenciesGrpcTransport, transports.ApiHubDependenciesRestTransport, ] assert transport in available_transports - transport = ApiHubDependenciesClient.get_transport_class("grpc") - assert transport == transports.ApiHubDependenciesGrpcTransport + transport = ApiHubDependenciesClient.get_transport_class("rest") + assert transport == transports.ApiHubDependenciesRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubDependenciesClient, transports.ApiHubDependenciesGrpcTransport, "grpc"), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - "grpc_asyncio", - ), (ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport, "rest"), ], ) @@ -510,11 +490,6 @@ def test_api_hub_dependencies_client_get_transport_class(): 
"_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubDependenciesClient), ) -@mock.patch.object( - ApiHubDependenciesAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubDependenciesAsyncClient), -) def test_api_hub_dependencies_client_client_options( client_class, transport_class, transport_name ): @@ -648,30 +623,6 @@ def test_api_hub_dependencies_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - ( - ApiHubDependenciesClient, - transports.ApiHubDependenciesGrpcTransport, - "grpc", - "true", - ), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - ( - ApiHubDependenciesClient, - transports.ApiHubDependenciesGrpcTransport, - "grpc", - "false", - ), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), ( ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport, @@ -691,11 +642,6 @@ def test_api_hub_dependencies_client_client_options( "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubDependenciesClient), ) -@mock.patch.object( - ApiHubDependenciesAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubDependenciesAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_api_hub_dependencies_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ -799,19 +745,12 @@ def test_api_hub_dependencies_client_mtls_env_auto( ) -@pytest.mark.parametrize( - "client_class", [ApiHubDependenciesClient, ApiHubDependenciesAsyncClient] -) +@pytest.mark.parametrize("client_class", [ApiHubDependenciesClient]) @mock.patch.object( ApiHubDependenciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ApiHubDependenciesClient), ) -@mock.patch.object( - ApiHubDependenciesAsyncClient, - 
"DEFAULT_ENDPOINT", - modify_default_endpoint(ApiHubDependenciesAsyncClient), -) def test_api_hub_dependencies_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() @@ -903,19 +842,12 @@ def test_api_hub_dependencies_client_get_mtls_endpoint_and_cert_source(client_cl ) -@pytest.mark.parametrize( - "client_class", [ApiHubDependenciesClient, ApiHubDependenciesAsyncClient] -) +@pytest.mark.parametrize("client_class", [ApiHubDependenciesClient]) @mock.patch.object( ApiHubDependenciesClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubDependenciesClient), ) -@mock.patch.object( - ApiHubDependenciesAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubDependenciesAsyncClient), -) def test_api_hub_dependencies_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" @@ -992,12 +924,6 @@ def test_api_hub_dependencies_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubDependenciesClient, transports.ApiHubDependenciesGrpcTransport, "grpc"), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - "grpc_asyncio", - ), (ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport, "rest"), ], ) @@ -1029,18 +955,6 @@ def test_api_hub_dependencies_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - ( - ApiHubDependenciesClient, - transports.ApiHubDependenciesGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), ( ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport, @@ -1073,96 +987,6 @@ def test_api_hub_dependencies_client_client_options_credentials_file( ) -def 
test_api_hub_dependencies_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = ApiHubDependenciesClient( - client_options={"api_endpoint": "squid.clam.whelk"} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - ApiHubDependenciesClient, - transports.ApiHubDependenciesGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_api_hub_dependencies_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1170,34 +994,119 @@ def test_api_hub_dependencies_client_create_channel_credentials_file( dict, ], ) -def test_create_dependency(request_type, transport: str = "grpc"): +def test_create_dependency_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["dependency"] = { + "name": "name_value", + "consumer": { + "operation_resource_name": "operation_resource_name_value", + "external_api_resource_name": "external_api_resource_name_value", + "display_name": "display_name_value", + }, + "supplier": {}, + "state": 1, + "description": "description_value", + "discovery_mode": 1, + "error_detail": {"error": 1, "error_time": {"seconds": 751, "nanos": 543}}, + "create_time": {}, + "update_time": {}, + "attributes": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency( + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateDependencyRequest.meta.fields["dependency"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["dependency"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["dependency"][field])): + del request_init["dependency"][field][i][subfield] + else: + del 
request_init["dependency"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency( name="name_value", state=common_fields.Dependency.State.PROPOSED, description="description_value", discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, ) - response = client.create_dependency(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateDependencyRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_dependency(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Dependency) @@ -1207,66 +1116,13 @@ def test_create_dependency(request_type, transport: str = "grpc"): assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL -def test_create_dependency_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateDependencyRequest() - - -def test_create_dependency_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateDependencyRequest( - parent="parent_value", - dependency_id="dependency_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_dependency(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateDependencyRequest( - parent="parent_value", - dependency_id="dependency_id_value", - ) - - -def test_create_dependency_use_cached_wrapped_rpc(): +def test_create_dependency_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1284,6 +1140,7 @@ def test_create_dependency_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_dependency ] = mock_rpc + request = {} client.create_dependency(request) @@ -1297,287 +1154,235 @@ def test_create_dependency_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_dependency_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_create_dependency_rest_required_fields( + request_type=apihub_service.CreateDependencyRequest, +): + transport_class = transports.ApiHubDependenciesRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - ) - response = await client.create_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateDependencyRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_create_dependency_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.create_dependency - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_dependency - ] = mock_rpc - - request = {} - await client.create_dependency(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.create_dependency(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dependency._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("dependency_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_create_dependency_async( - transport: str = "grpc_asyncio", request_type=apihub_service.CreateDependencyRequest -): - client = ApiHubDependenciesAsyncClient( + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - ) - response = await client.create_dependency(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateDependencyRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Dependency) - assert response.name == "name_value" - assert response.state == common_fields.Dependency.State.PROPOSED - assert response.description == "description_value" - assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_dependency(request) -@pytest.mark.asyncio -async def test_create_dependency_async_from_dict(): - await test_create_dependency_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_dependency_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_dependency_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateDependencyRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - call.return_value = common_fields.Dependency() - client.create_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_dependency._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("dependencyId",)) + & set( + ( + "parent", + "dependency", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_dependency_field_headers_async(): - client = ApiHubDependenciesAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_dependency_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateDependencyRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubDependenciesClient(transport=transport) with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "post_create_dependency" + ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_create_dependency" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateDependencyRequest.pb( + apihub_service.CreateDependencyRequest() ) - await client.create_dependency(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Dependency.to_json( + common_fields.Dependency() + ) -def test_create_dependency_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.CreateDependencyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Dependency() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.create_dependency( - parent="parent_value", - dependency=common_fields.Dependency(name="name_value"), - dependency_id="dependency_id_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].dependency - mock_val = common_fields.Dependency(name="name_value") - assert arg == mock_val - arg = args[0].dependency_id - mock_val = "dependency_id_value" - assert arg == mock_val - - -def test_create_dependency_flattened_error(): + pre.assert_called_once() + post.assert_called_once() + + +def test_create_dependency_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateDependencyRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_dependency( - apihub_service.CreateDependencyRequest(), - parent="parent_value", - dependency=common_fields.Dependency(name="name_value"), - dependency_id="dependency_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_dependency(request) -@pytest.mark.asyncio -async def test_create_dependency_flattened_async(): - client = ApiHubDependenciesAsyncClient( +def test_create_dependency_rest_flattened(): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_dependency( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", dependency=common_fields.Dependency(name="name_value"), dependency_id="dependency_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_dependency(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].dependency - mock_val = common_fields.Dependency(name="name_value") - assert arg == mock_val - arg = args[0].dependency_id - mock_val = "dependency_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_dependency_flattened_error_async(): - client = ApiHubDependenciesAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/dependencies" + % client.transport._host, + args[1], + ) + + +def test_create_dependency_rest_flattened_error(transport: str = "rest"): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_dependency( + client.create_dependency( apihub_service.CreateDependencyRequest(), parent="parent_value", dependency=common_fields.Dependency(name="name_value"), @@ -1585,6 +1390,12 @@ async def test_create_dependency_flattened_error_async(): ) +def test_create_dependency_rest_error(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1592,34 +1403,38 @@ async def test_create_dependency_flattened_error_async(): dict, ], ) -def test_get_dependency(request_type, transport: str = "grpc"): +def test_get_dependency_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency( name="name_value", state=common_fields.Dependency.State.PROPOSED, description="description_value", discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, ) - response = client.get_dependency(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetDependencyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_dependency(request) + + # Establish that the response is the type that we expect. 
assert isinstance(response, common_fields.Dependency) assert response.name == "name_value" assert response.state == common_fields.Dependency.State.PROPOSED @@ -1627,60 +1442,13 @@ def test_get_dependency(request_type, transport: str = "grpc"): assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL -def test_get_dependency_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDependencyRequest() - - -def test_get_dependency_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetDependencyRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_dependency(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDependencyRequest( - name="name_value", - ) - - -def test_get_dependency_use_cached_wrapped_rpc(): +def test_get_dependency_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1696,6 +1464,7 @@ def test_get_dependency_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_dependency] = mock_rpc + request = {} client.get_dependency(request) @@ -1709,207 +1478,218 @@ def test_get_dependency_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_dependency_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - ) - response = await client.get_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDependencyRequest() - - -@pytest.mark.asyncio -async def test_get_dependency_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_dependency_rest_required_fields( + request_type=apihub_service.GetDependencyRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubDependenciesRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_dependency - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_dependency - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_dependency(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dependency._get_unset_required_fields(jsonified_request) 
+ jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_dependency(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_dependency_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetDependencyRequest -): - client = ApiHubDependenciesAsyncClient( + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - ) - response = await client.get_dependency(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetDependencyRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Dependency) - assert response.name == "name_value" - assert response.state == common_fields.Dependency.State.PROPOSED - assert response.description == "description_value" - assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_dependency(request) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_get_dependency_async_from_dict(): - await test_get_dependency_async(request_type=dict) +def test_get_dependency_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) -def test_get_dependency_field_headers(): - client = ApiHubDependenciesClient( + unset_fields = transport.get_dependency._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_dependency_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), ) + client = ApiHubDependenciesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "post_get_dependency" + ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_get_dependency" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetDependencyRequest.pb( + apihub_service.GetDependencyRequest() + ) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetDependencyRequest() - - request.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Dependency.to_json( + common_fields.Dependency() + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - call.return_value = common_fields.Dependency() - client.get_dependency(request) + request = apihub_service.GetDependencyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Dependency() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.get_dependency( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -@pytest.mark.asyncio -async def test_get_dependency_field_headers_async(): - client = ApiHubDependenciesAsyncClient( +def test_get_dependency_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetDependencyRequest +): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.GetDependencyRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency() - ) - await client.get_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} + request = request_type(**request_init) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_dependency(request) -def test_get_dependency_flattened(): +def test_get_dependency_rest_flattened(): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_dependency( + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_dependency(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dependencies/*}" + % client.transport._host, + args[1], + ) -def test_get_dependency_flattened_error(): +def test_get_dependency_rest_flattened_error(transport: str = "rest"): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1921,49 +1701,11 @@ def test_get_dependency_flattened_error(): ) -@pytest.mark.asyncio -async def test_get_dependency_flattened_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_dependency( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_get_dependency_flattened_error_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_dependency_rest_error(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_dependency( - apihub_service.GetDependencyRequest(), - name="name_value", - ) - @pytest.mark.parametrize( "request_type", @@ -1972,34 +1714,123 @@ async def test_get_dependency_flattened_error_async(): dict, ], ) -def test_update_dependency(request_type, transport: str = "grpc"): +def test_update_dependency_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "dependency": { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } + } + request_init["dependency"] = { + "name": "projects/sample1/locations/sample2/dependencies/sample3", + "consumer": { + "operation_resource_name": "operation_resource_name_value", + "external_api_resource_name": "external_api_resource_name_value", + "display_name": "display_name_value", + }, + "supplier": {}, + "state": 1, + "description": "description_value", + "discovery_mode": 1, + "error_detail": {"error": 1, "error_time": {"seconds": 751, "nanos": 543}}, + "create_time": {}, + "update_time": {}, + "attributes": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency( + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateDependencyRequest.meta.fields["dependency"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["dependency"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["dependency"][field])): + del request_init["dependency"][field][i][subfield] + else: + del 
request_init["dependency"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency( name="name_value", state=common_fields.Dependency.State.PROPOSED, description="description_value", discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, ) - response = client.update_dependency(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateDependencyRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_dependency(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Dependency) @@ -2009,60 +1840,13 @@ def test_update_dependency(request_type, transport: str = "grpc"): assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL -def test_update_dependency_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateDependencyRequest() - - -def test_update_dependency_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateDependencyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.update_dependency(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateDependencyRequest() - - -def test_update_dependency_use_cached_wrapped_rpc(): +def test_update_dependency_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2080,6 +1864,7 @@ def test_update_dependency_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.update_dependency ] = mock_rpc + request = {} client.update_dependency(request) @@ -2093,284 +1878,249 @@ def test_update_dependency_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_dependency_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - ) - response = await client.update_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateDependencyRequest() - - -@pytest.mark.asyncio -async def test_update_dependency_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_update_dependency_rest_required_fields( + request_type=apihub_service.UpdateDependencyRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_dependency - in client._client._transport._wrapped_methods - ) + transport_class = transports.ApiHubDependenciesRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_dependency - ] = mock_rpc + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.update_dependency(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.update_dependency(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_dependency._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -@pytest.mark.asyncio -async def test_update_dependency_async( - transport: str = "grpc_asyncio", request_type=apihub_service.UpdateDependencyRequest -): - client = ApiHubDependenciesAsyncClient( + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - ) - response = await client.update_dependency(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateDependencyRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Dependency) - assert response.name == "name_value" - assert response.state == common_fields.Dependency.State.PROPOSED - assert response.description == "description_value" - assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_dependency(request) -@pytest.mark.asyncio -async def test_update_dependency_async_from_dict(): - await test_update_dependency_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_update_dependency_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateDependencyRequest() - - request.dependency.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - call.return_value = common_fields.Dependency() - client.update_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_update_dependency_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "dependency.name=name_value", - ) in kw["metadata"] + unset_fields = transport.update_dependency._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "dependency", + "updateMask", + ) + ) + ) -@pytest.mark.asyncio -async def test_update_dependency_field_headers_async(): - client = ApiHubDependenciesAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_dependency_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateDependencyRequest() - - request.dependency.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubDependenciesClient(transport=transport) with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "post_update_dependency" + ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_update_dependency" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateDependencyRequest.pb( + apihub_service.UpdateDependencyRequest() ) - await client.update_dependency(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "dependency.name=name_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Dependency.to_json( + common_fields.Dependency() + ) -def test_update_dependency_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.UpdateDependencyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Dependency() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.update_dependency( - dependency=common_fields.Dependency(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].dependency - mock_val = common_fields.Dependency(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_update_dependency_flattened_error(): +def test_update_dependency_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateDependencyRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_dependency( - apihub_service.UpdateDependencyRequest(), - dependency=common_fields.Dependency(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # send a request that will satisfy transcoding + request_init = { + "dependency": { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_dependency(request) -@pytest.mark.asyncio -async def test_update_dependency_flattened_async(): - client = ApiHubDependenciesAsyncClient( +def test_update_dependency_rest_flattened(): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_dependency( + # get arguments that satisfy an http rule for this method + sample_request = { + "dependency": { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( dependency=common_fields.Dependency(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_dependency(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].dependency - mock_val = common_fields.Dependency(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_dependency_flattened_error_async(): - client = ApiHubDependenciesAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{dependency.name=projects/*/locations/*/dependencies/*}" + % client.transport._host, + args[1], + ) + + +def test_update_dependency_rest_flattened_error(transport: str = "rest"): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_dependency( + client.update_dependency( apihub_service.UpdateDependencyRequest(), dependency=common_fields.Dependency(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) +def test_update_dependency_rest_error(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -2378,92 +2128,41 @@ async def test_update_dependency_flattened_error_async(): dict, ], ) -def test_delete_dependency(request_type, transport: str = "grpc"): +def test_delete_dependency_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_dependency(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteDependencyRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_dependency(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_dependency_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteDependencyRequest() - - -def test_delete_dependency_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteDependencyRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_dependency(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteDependencyRequest( - name="name_value", - ) - - -def test_delete_dependency_use_cached_wrapped_rpc(): +def test_delete_dependency_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2481,6 +2180,7 @@ def test_delete_dependency_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.delete_dependency ] = mock_rpc + request = {} client.delete_dependency(request) @@ -2494,252 +2194,222 @@ def test_delete_dependency_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_dependency_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteDependencyRequest() - - -@pytest.mark.asyncio -async def test_delete_dependency_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_delete_dependency_rest_required_fields( + request_type=apihub_service.DeleteDependencyRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubDependenciesRestTransport - # Ensure method has been cached - assert ( - client._client._transport.delete_dependency - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_dependency - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.delete_dependency(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.delete_dependency(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_delete_dependency_async( - transport: str = "grpc_asyncio", request_type=apihub_service.DeleteDependencyRequest -): - client = ApiHubDependenciesAsyncClient( + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_dependency(request) + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteDependencyRequest() - assert args[0] == request + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Establish that the response is the type that we expect. - assert response is None + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_dependency(request) -@pytest.mark.asyncio -async def test_delete_dependency_async_from_dict(): - await test_delete_dependency_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_delete_dependency_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_dependency_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteDependencyRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - call.return_value = None - client.delete_dependency(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.delete_dependency._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_delete_dependency_field_headers_async(): - client = ApiHubDependenciesAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_dependency_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), ) + client = ApiHubDependenciesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_delete_dependency" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteDependencyRequest.pb( + apihub_service.DeleteDependencyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteDependencyRequest() - - request.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_dependency(request) + request = apihub_service.DeleteDependencyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.delete_dependency( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + pre.assert_called_once() -def test_delete_dependency_flattened(): +def test_delete_dependency_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteDependencyRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_dependency( - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_dependency(request) -def test_delete_dependency_flattened_error(): +def test_delete_dependency_rest_flattened(): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_dependency( - apihub_service.DeleteDependencyRequest(), - name="name_value", - ) - + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None -@pytest.mark.asyncio -async def test_delete_dependency_flattened_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.delete_dependency( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_dependency(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dependencies/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_dependency_flattened_error_async(): - client = ApiHubDependenciesAsyncClient( +def test_delete_dependency_rest_flattened_error(transport: str = "rest"): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_dependency( + client.delete_dependency( apihub_service.DeleteDependencyRequest(), name="name_value", ) +def test_delete_dependency_rest_error(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -2747,99 +2417,46 @@ async def test_delete_dependency_flattened_error_async(): dict, ], ) -def test_list_dependencies(request_type, transport: str = "grpc"): +def test_list_dependencies_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListDependenciesResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDependenciesResponse( next_page_token="next_page_token_value", ) - response = client.list_dependencies(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListDependenciesRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListDependenciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_dependencies(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListDependenciesPager) assert response.next_page_token == "next_page_token_value" -def test_list_dependencies_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_dependencies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListDependenciesRequest() - - -def test_list_dependencies_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.ListDependenciesRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_dependencies(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListDependenciesRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_dependencies_use_cached_wrapped_rpc(): +def test_list_dependencies_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2857,6 +2474,7 @@ def test_list_dependencies_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.list_dependencies ] = mock_rpc + request = {} client.list_dependencies(request) @@ -2870,277 +2488,256 @@ def test_list_dependencies_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_dependencies_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_list_dependencies_rest_required_fields( + request_type=apihub_service.ListDependenciesRequest, +): + transport_class = transports.ApiHubDependenciesRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDependenciesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_dependencies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListDependenciesRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_dependencies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_list_dependencies_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" - # Ensure 
method has been cached - assert ( - client._client._transport.list_dependencies - in client._client._transport._wrapped_methods + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_dependencies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", ) + ) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_dependencies - ] = mock_rpc - - request = {} - await client.list_dependencies(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_dependencies(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_dependencies_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListDependenciesRequest -): - client = ApiHubDependenciesAsyncClient( + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDependenciesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDependenciesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_dependencies(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListDependenciesRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = apihub_service.ListDependenciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDependenciesAsyncPager) - assert response.next_page_token == "next_page_token_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_dependencies(request) -@pytest.mark.asyncio -async def test_list_dependencies_async_from_dict(): - await test_list_dependencies_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_dependencies_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_dependencies_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListDependenciesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - call.return_value = apihub_service.ListDependenciesResponse() - client.list_dependencies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.list_dependencies._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_dependencies_field_headers_async(): - client = ApiHubDependenciesAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_dependencies_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListDependenciesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubDependenciesClient(transport=transport) with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDependenciesResponse() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "post_list_dependencies" + ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_list_dependencies" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListDependenciesRequest.pb( + apihub_service.ListDependenciesRequest() ) - await client.list_dependencies(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListDependenciesResponse.to_json( + apihub_service.ListDependenciesResponse() + ) -def test_list_dependencies_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.ListDependenciesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListDependenciesResponse() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListDependenciesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.list_dependencies( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_dependencies_flattened_error(): +def test_list_dependencies_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListDependenciesRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_dependencies( - apihub_service.ListDependenciesRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_dependencies(request) -@pytest.mark.asyncio -async def test_list_dependencies_flattened_async(): - client = ApiHubDependenciesAsyncClient( +def test_list_dependencies_rest_flattened(): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListDependenciesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDependenciesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDependenciesResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_dependencies( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListDependenciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_dependencies(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/dependencies" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_dependencies_flattened_error_async(): - client = ApiHubDependenciesAsyncClient( +def test_list_dependencies_rest_flattened_error(transport: str = "rest"): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_dependencies( + client.list_dependencies( apihub_service.ListDependenciesRequest(), parent="parent_value", ) -def test_list_dependencies_pager(transport_name: str = "grpc"): +def test_list_dependencies_rest_pager(transport: str = "rest"): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListDependenciesResponse( dependencies=[ common_fields.Dependency(), @@ -3165,3927 +2762,831 @@ def test_list_dependencies_pager(transport_name: str = "grpc"): common_fields.Dependency(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListDependenciesResponse.to_json(x) for x in response ) - pager = client.list_dependencies(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert pager._metadata 
== expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.list_dependencies(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.Dependency) for i in results) + pages = list(client.list_dependencies(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + -def test_list_dependencies_pages(transport_name: str = "grpc"): - client = ApiHubDependenciesClient( +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, ) + with pytest.raises(ValueError): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - common_fields.Dependency(), - ], - next_page_token="abc", - ), - apihub_service.ListDependenciesResponse( - dependencies=[], - next_page_token="def", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - ], - next_page_token="ghi", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - ], - ), - RuntimeError, - ) - pages = list(client.list_dependencies(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_dependencies_async_pager(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - common_fields.Dependency(), - ], - next_page_token="abc", - ), - apihub_service.ListDependenciesResponse( - dependencies=[], - next_page_token="def", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - ], - next_page_token="ghi", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_dependencies( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.Dependency) for i in responses) - - -@pytest.mark.asyncio -async def test_list_dependencies_async_pages(): - client = ApiHubDependenciesAsyncClient( + # It is an error to provide a credentials file and a transport instance. + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - common_fields.Dependency(), - ], - next_page_token="abc", - ), - apihub_service.ListDependenciesResponse( - dependencies=[], - next_page_token="def", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - ], - next_page_token="ghi", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - ], - ), - RuntimeError, + with pytest.raises(ValueError): + client = ApiHubDependenciesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_dependencies(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateDependencyRequest, - dict, - ], -) -def test_create_dependency_rest(request_type): - client = ApiHubDependenciesClient( + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["dependency"] = { - "name": "name_value", - "consumer": { - "operation_resource_name": "operation_resource_name_value", - "external_api_resource_name": "external_api_resource_name_value", - "display_name": "display_name_value", - }, - "supplier": {}, - "state": 1, - "description": "description_value", - "discovery_mode": 1, - "error_detail": {"error": 1, "error_time": {"seconds": 751, "nanos": 543}}, - "create_time": {}, - "update_time": {}, - "attributes": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateDependencyRequest.meta.fields["dependency"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["dependency"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["dependency"][field])): - del request_init["dependency"][field][i][subfield] - else: - del 
request_init["dependency"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_dependency(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Dependency) - assert response.name == "name_value" - assert response.state == common_fields.Dependency.State.PROPOSED - assert response.description == "description_value" - assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL - - -def test_create_dependency_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_dependency in 
client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_dependency - ] = mock_rpc - - request = {} - client.create_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_dependency(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_dependency_rest_required_fields( - request_type=apihub_service.CreateDependencyRequest, -): - transport_class = transports.ApiHubDependenciesRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_dependency._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_dependency._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("dependency_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_dependency(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_dependency_rest_unset_required_fields(): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_dependency._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("dependencyId",)) - & set( - ( - "parent", - "dependency", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_dependency_rest_interceptors(null_interceptor): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubDependenciesRestInterceptor(), - ) - client = ApiHubDependenciesClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "post_create_dependency" - ) as post, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "pre_create_dependency" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.CreateDependencyRequest.pb( - 
apihub_service.CreateDependencyRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Dependency.to_json( - common_fields.Dependency() - ) - - request = apihub_service.CreateDependencyRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Dependency() - - client.create_dependency( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_dependency_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateDependencyRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_dependency(request) - - -def test_create_dependency_rest_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Dependency() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - dependency=common_fields.Dependency(name="name_value"), - dependency_id="dependency_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_dependency(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/dependencies" - % client.transport._host, - args[1], - ) - - -def test_create_dependency_rest_flattened_error(transport: str = "rest"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_dependency( - apihub_service.CreateDependencyRequest(), - parent="parent_value", - dependency=common_fields.Dependency(name="name_value"), - dependency_id="dependency_id_value", - ) - - -def test_create_dependency_rest_error(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetDependencyRequest, - dict, - ], -) -def test_get_dependency_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_dependency(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Dependency) - assert response.name == "name_value" - assert response.state == common_fields.Dependency.State.PROPOSED - assert response.description == "description_value" - assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL - - -def test_get_dependency_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_dependency in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_dependency] = mock_rpc - - request = {} - client.get_dependency(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_dependency(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_dependency_rest_required_fields( - request_type=apihub_service.GetDependencyRequest, -): - transport_class = transports.ApiHubDependenciesRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_dependency._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_dependency._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_dependency(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_dependency_rest_unset_required_fields(): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_dependency._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_dependency_rest_interceptors(null_interceptor): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubDependenciesRestInterceptor(), - ) - client = ApiHubDependenciesClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "post_get_dependency" - ) as post, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "pre_get_dependency" - ) as pre: - pre.assert_not_called() - post.assert_not_called() 
- pb_message = apihub_service.GetDependencyRequest.pb( - apihub_service.GetDependencyRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Dependency.to_json( - common_fields.Dependency() - ) - - request = apihub_service.GetDependencyRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Dependency() - - client.get_dependency( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_dependency_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetDependencyRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_dependency(request) - - -def test_get_dependency_rest_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Dependency() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/dependencies/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_dependency(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/dependencies/*}" - % client.transport._host, - args[1], - ) - - -def test_get_dependency_rest_flattened_error(transport: str = "rest"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_dependency( - apihub_service.GetDependencyRequest(), - name="name_value", - ) - - -def test_get_dependency_rest_error(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateDependencyRequest, - dict, - ], -) -def test_update_dependency_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "dependency": { - "name": "projects/sample1/locations/sample2/dependencies/sample3" - } - } - request_init["dependency"] = { - "name": "projects/sample1/locations/sample2/dependencies/sample3", - "consumer": { - "operation_resource_name": "operation_resource_name_value", - "external_api_resource_name": "external_api_resource_name_value", - "display_name": "display_name_value", - }, - "supplier": {}, - "state": 1, - "description": "description_value", - "discovery_mode": 1, - "error_detail": {"error": 1, "error_time": {"seconds": 751, "nanos": 543}}, - "create_time": {}, - "update_time": {}, - "attributes": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateDependencyRequest.meta.fields["dependency"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["dependency"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["dependency"][field])): - del request_init["dependency"][field][i][subfield] - else: - del 
request_init["dependency"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_dependency(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Dependency) - assert response.name == "name_value" - assert response.state == common_fields.Dependency.State.PROPOSED - assert response.description == "description_value" - assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL - - -def test_update_dependency_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_dependency in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - 
mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.update_dependency - ] = mock_rpc - - request = {} - client.update_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_dependency(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_dependency_rest_required_fields( - request_type=apihub_service.UpdateDependencyRequest, -): - transport_class = transports.ApiHubDependenciesRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_dependency._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_dependency._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_dependency(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_dependency_rest_unset_required_fields(): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_dependency._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "dependency", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_dependency_rest_interceptors(null_interceptor): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubDependenciesRestInterceptor(), - ) - client = ApiHubDependenciesClient(transport=transport) - with mock.patch.object( - type(client.transport._session), 
"request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "post_update_dependency" - ) as post, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "pre_update_dependency" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.UpdateDependencyRequest.pb( - apihub_service.UpdateDependencyRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Dependency.to_json( - common_fields.Dependency() - ) - - request = apihub_service.UpdateDependencyRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Dependency() - - client.update_dependency( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_dependency_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateDependencyRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "dependency": { - "name": "projects/sample1/locations/sample2/dependencies/sample3" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_dependency(request) - - -def test_update_dependency_rest_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency() - - # get arguments that satisfy an http rule for this method - sample_request = { - "dependency": { - "name": "projects/sample1/locations/sample2/dependencies/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - dependency=common_fields.Dependency(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_dependency(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{dependency.name=projects/*/locations/*/dependencies/*}" - % client.transport._host, - args[1], - ) - - -def test_update_dependency_rest_flattened_error(transport: str = "rest"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_dependency( - apihub_service.UpdateDependencyRequest(), - dependency=common_fields.Dependency(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_dependency_rest_error(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteDependencyRequest, - dict, - ], -) -def test_delete_dependency_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_dependency(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_dependency_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_dependency in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_dependency - ] = mock_rpc - - request = {} - client.delete_dependency(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_dependency(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_dependency_rest_required_fields( - request_type=apihub_service.DeleteDependencyRequest, -): - transport_class = transports.ApiHubDependenciesRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_dependency._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_dependency._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_dependency(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_dependency_rest_unset_required_fields(): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_dependency._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_dependency_rest_interceptors(null_interceptor): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubDependenciesRestInterceptor(), - ) - client = ApiHubDependenciesClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "pre_delete_dependency" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteDependencyRequest.pb( - apihub_service.DeleteDependencyRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = 
Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteDependencyRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_dependency( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_dependency_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteDependencyRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_dependency(request) - - -def test_delete_dependency_rest_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/dependencies/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_dependency(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/dependencies/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_dependency_rest_flattened_error(transport: str = "rest"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_dependency( - apihub_service.DeleteDependencyRequest(), - name="name_value", - ) - - -def test_delete_dependency_rest_error(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListDependenciesRequest, - dict, - ], -) -def test_list_dependencies_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListDependenciesResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListDependenciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_dependencies(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDependenciesPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_dependencies_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_dependencies in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_dependencies - ] = mock_rpc - - request = {} - client.list_dependencies(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_dependencies(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_dependencies_rest_required_fields( - request_type=apihub_service.ListDependenciesRequest, -): - transport_class = transports.ApiHubDependenciesRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_dependencies._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_dependencies._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListDependenciesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListDependenciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_dependencies(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_dependencies_rest_unset_required_fields(): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_dependencies._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_dependencies_rest_interceptors(null_interceptor): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubDependenciesRestInterceptor(), - ) - client = ApiHubDependenciesClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, 
mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "post_list_dependencies" - ) as post, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "pre_list_dependencies" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListDependenciesRequest.pb( - apihub_service.ListDependenciesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListDependenciesResponse.to_json( - apihub_service.ListDependenciesResponse() - ) - - request = apihub_service.ListDependenciesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListDependenciesResponse() - - client.list_dependencies( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_dependencies_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListDependenciesRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_dependencies(request) - - -def test_list_dependencies_rest_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListDependenciesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListDependenciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_dependencies(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/dependencies" - % client.transport._host, - args[1], - ) - - -def test_list_dependencies_rest_flattened_error(transport: str = "rest"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_dependencies( - apihub_service.ListDependenciesRequest(), - parent="parent_value", - ) - - -def test_list_dependencies_rest_pager(transport: str = "rest"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - common_fields.Dependency(), - ], - next_page_token="abc", - ), - apihub_service.ListDependenciesResponse( - dependencies=[], - next_page_token="def", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - ], - next_page_token="ghi", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.ListDependenciesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_dependencies(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.Dependency) for i in results) - - pages = list(client.list_dependencies(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ApiHubDependenciesGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.ApiHubDependenciesGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubDependenciesClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ApiHubDependenciesGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ApiHubDependenciesClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ApiHubDependenciesClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ApiHubDependenciesGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubDependenciesClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ApiHubDependenciesGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ApiHubDependenciesClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.ApiHubDependenciesGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ApiHubDependenciesGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubDependenciesGrpcTransport, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - transports.ApiHubDependenciesRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = ApiHubDependenciesClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ApiHubDependenciesGrpcTransport, - ) - - -def test_api_hub_dependencies_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ApiHubDependenciesTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_api_hub_dependencies_base_transport(): - # Instantiate the base transport. 
- with mock.patch( - "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.ApiHubDependenciesTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "create_dependency", - "get_dependency", - "update_dependency", - "delete_dependency", - "list_dependencies", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_api_hub_dependencies_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ApiHubDependenciesTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_api_hub_dependencies_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ApiHubDependenciesTransport() - adc.assert_called_once() - - -def test_api_hub_dependencies_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ApiHubDependenciesClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubDependenciesGrpcTransport, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - ], -) -def test_api_hub_dependencies_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubDependenciesGrpcTransport, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - transports.ApiHubDependenciesRestTransport, - ], -) -def test_api_hub_dependencies_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ApiHubDependenciesGrpcTransport, grpc_helpers), - (transports.ApiHubDependenciesGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_api_hub_dependencies_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubDependenciesGrpcTransport, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - ], -) -def test_api_hub_dependencies_grpc_transport_client_cert_source_for_mtls( - transport_class, -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_api_hub_dependencies_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.ApiHubDependenciesRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_api_hub_dependencies_host_no_port(transport_name): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_api_hub_dependencies_host_with_port(transport_name): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com:8000" - ) - - 
-@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_api_hub_dependencies_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = ApiHubDependenciesClient( - credentials=creds1, - transport=transport_name, - ) - client2 = ApiHubDependenciesClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_dependency._session - session2 = client2.transport.create_dependency._session - assert session1 != session2 - session1 = client1.transport.get_dependency._session - session2 = client2.transport.get_dependency._session - assert session1 != session2 - session1 = client1.transport.update_dependency._session - session2 = client2.transport.update_dependency._session - assert session1 != session2 - session1 = client1.transport.delete_dependency._session - session2 = client2.transport.delete_dependency._session - assert session1 != session2 - session1 = client1.transport.list_dependencies._session - session2 = client2.transport.list_dependencies._session - assert session1 != session2 - - -def test_api_hub_dependencies_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ApiHubDependenciesGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_api_hub_dependencies_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.ApiHubDependenciesGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubDependenciesGrpcTransport, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - ], -) -def test_api_hub_dependencies_transport_channel_mtls_with_client_cert_source( - transport_class, -): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, 
client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubDependenciesGrpcTransport, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - ], -) -def test_api_hub_dependencies_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_attribute_path(): - project = "squid" - location = "clam" - attribute = "whelk" - expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( - project=project, - location=location, - attribute=attribute, - ) - actual = ApiHubDependenciesClient.attribute_path(project, location, attribute) - assert expected == actual - - -def test_parse_attribute_path(): - expected = { - "project": "octopus", - "location": "oyster", - "attribute": "nudibranch", - } - path = ApiHubDependenciesClient.attribute_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubDependenciesClient.parse_attribute_path(path) - assert expected == actual - - -def test_dependency_path(): - project = "cuttlefish" - location = "mussel" - dependency = "winkle" - expected = ( - "projects/{project}/locations/{location}/dependencies/{dependency}".format( - project=project, - location=location, - dependency=dependency, - ) - ) - actual = ApiHubDependenciesClient.dependency_path(project, location, dependency) - assert expected == actual - - -def test_parse_dependency_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "dependency": "abalone", - } - path = ApiHubDependenciesClient.dependency_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubDependenciesClient.parse_dependency_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = ApiHubDependenciesClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = ApiHubDependenciesClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubDependenciesClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = ApiHubDependenciesClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = ApiHubDependenciesClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubDependenciesClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = ApiHubDependenciesClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = ApiHubDependenciesClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubDependenciesClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format( - project=project, - ) - actual = ApiHubDependenciesClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = ApiHubDependenciesClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubDependenciesClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = ApiHubDependenciesClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = ApiHubDependenciesClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubDependenciesClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.ApiHubDependenciesTransport, "_prep_wrapped_messages" - ) as prep: - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.ApiHubDependenciesTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = ApiHubDependenciesClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + client_options=options, + transport=transport, + ) - response = client.list_operations(request) + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ApiHubDependenciesClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # It is an error to provide scopes and a transport instance. 
+ transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ApiHubDependenciesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) -def test_delete_operation(transport: str = "grpc"): - client = ApiHubDependenciesClient( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + client = ApiHubDependenciesClient(transport=transport) + assert client.transport is transport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. - assert response is None +@pytest.mark.parametrize( + "transport_class", + [ + transports.ApiHubDependenciesRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubDependenciesAsyncClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ApiHubDependenciesClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + assert transport.kind == transport_name - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_api_hub_dependencies_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ApiHubDependenciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) - # Establish that the response is the type that we expect. - assert response is None +def test_api_hub_dependencies_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ApiHubDependenciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_delete_operation_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_dependency", + "get_dependency", + "update_dependency", + "delete_dependency", + "list_dependencies", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + with pytest.raises(NotImplementedError): + transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" +def test_api_hub_dependencies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubDependenciesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_api_hub_dependencies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubDependenciesTransport() + adc.assert_called_once() -def test_delete_operation_from_dict(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } +def test_api_hub_dependencies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ApiHubDependenciesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } +def test_api_hub_dependencies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ApiHubDependenciesRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) - call.assert_called() + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -def test_cancel_operation(transport: str = "grpc"): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_dependencies_host_no_port(transport_name): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubDependenciesAsyncClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_dependencies_host_with_port(transport_name): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. 
- assert response is None +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_dependencies_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ApiHubDependenciesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ApiHubDependenciesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_dependency._session + session2 = client2.transport.create_dependency._session + assert session1 != session2 + session1 = client1.transport.get_dependency._session + session2 = client2.transport.get_dependency._session + assert session1 != session2 + session1 = client1.transport.update_dependency._session + session2 = client2.transport.update_dependency._session + assert session1 != session2 + session1 = client1.transport.delete_dependency._session + session2 = client2.transport.delete_dependency._session + assert session1 != session2 + session1 = client1.transport.list_dependencies._session + session2 = client2.transport.list_dependencies._session + assert session1 != session2 -def test_cancel_operation_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_attribute_path(): + project = "squid" + location = "clam" + attribute = "whelk" + expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( + project=project, + location=location, + attribute=attribute, ) + actual = ApiHubDependenciesClient.attribute_path(project, location, attribute) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_attribute_path(): + expected = { + "project": "octopus", + "location": "oyster", + "attribute": "nudibranch", + } + path = ApiHubDependenciesClient.attribute_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_attribute_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_dependency_path(): + project = "cuttlefish" + location = "mussel" + dependency = "winkle" + expected = ( + "projects/{project}/locations/{location}/dependencies/{dependency}".format( + project=project, + location=location, + dependency=dependency, + ) ) + actual = ApiHubDependenciesClient.dependency_path(project, location, dependency) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_dependency_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "dependency": "abalone", + } + path = ApiHubDependenciesClient.dependency_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_dependency_path(path) + assert expected == actual -def test_cancel_operation_from_dict(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + actual = ApiHubDependenciesClient.common_billing_account_path(billing_account) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ApiHubDependenciesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_common_billing_account_path(path) + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, ) + actual = ApiHubDependenciesClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ApiHubDependenciesClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = ApiHubDependenciesClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = ApiHubDependenciesClient.common_organization_path(organization) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ApiHubDependenciesClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = ApiHubDependenciesClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, ) + actual = ApiHubDependenciesClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ApiHubDependenciesClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. 
+ actual = ApiHubDependenciesClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = ApiHubDependenciesClient.common_location_path(project, location) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ApiHubDependenciesClient.common_location_path(**expected) + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() + with mock.patch.object( + transports.ApiHubDependenciesTransport, "_prep_wrapped_messages" + ) as prep: + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - response = await client.get_operation( - request={ - "name": "locations", - } + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ApiHubDependenciesTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ApiHubDependenciesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = ApiHubDependenciesAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. 
+ assert response is None -def test_list_locations_from_dict(): +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_get_location(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + assert response is None -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = ApiHubDependenciesAsyncClient( +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -def test_get_location_field_headers(): - client = ApiHubDependenciesClient(credentials=ga_credentials.AnonymousCredentials()) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.get_location(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials() +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -7103,7 +3604,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = ApiHubDependenciesClient( @@ -7120,11 +3620,7 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (ApiHubDependenciesClient, transports.ApiHubDependenciesGrpcTransport), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - ), + (ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py index 97adab5ecf39..dc92c3df1475 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py @@ -46,7 +46,6 @@ from requests.sessions import Session from google.cloud.apihub_v1.services.api_hub_plugin import ( - ApiHubPluginAsyncClient, ApiHubPluginClient, transports, ) @@ -195,11 +194,6 @@ def test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubPluginClient), ) -@mock.patch.object( - ApiHubPluginAsyncClient, - 
"_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubPluginAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -284,7 +278,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubPluginClient, transports.ApiHubPluginGrpcTransport, "grpc"), (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest"), ], ) @@ -364,8 +357,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (ApiHubPluginClient, "grpc"), - (ApiHubPluginAsyncClient, "grpc_asyncio"), (ApiHubPluginClient, "rest"), ], ) @@ -390,8 +381,6 @@ def test_api_hub_plugin_client_from_service_account_info(client_class, transport @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.ApiHubPluginGrpcTransport, "grpc"), - (transports.ApiHubPluginGrpcAsyncIOTransport, "grpc_asyncio"), (transports.ApiHubPluginRestTransport, "rest"), ], ) @@ -416,8 +405,6 @@ def test_api_hub_plugin_client_service_account_always_use_jwt( @pytest.mark.parametrize( "client_class,transport_name", [ - (ApiHubPluginClient, "grpc"), - (ApiHubPluginAsyncClient, "grpc_asyncio"), (ApiHubPluginClient, "rest"), ], ) @@ -449,24 +436,17 @@ def test_api_hub_plugin_client_from_service_account_file(client_class, transport def test_api_hub_plugin_client_get_transport_class(): transport = ApiHubPluginClient.get_transport_class() available_transports = [ - transports.ApiHubPluginGrpcTransport, transports.ApiHubPluginRestTransport, ] assert transport in available_transports - transport = ApiHubPluginClient.get_transport_class("grpc") - assert transport == transports.ApiHubPluginGrpcTransport + transport = ApiHubPluginClient.get_transport_class("rest") + assert transport == transports.ApiHubPluginRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - 
(ApiHubPluginClient, transports.ApiHubPluginGrpcTransport, "grpc"), - ( - ApiHubPluginAsyncClient, - transports.ApiHubPluginGrpcAsyncIOTransport, - "grpc_asyncio", - ), (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest"), ], ) @@ -475,11 +455,6 @@ def test_api_hub_plugin_client_get_transport_class(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubPluginClient), ) -@mock.patch.object( - ApiHubPluginAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubPluginAsyncClient), -) def test_api_hub_plugin_client_client_options( client_class, transport_class, transport_name ): @@ -613,20 +588,6 @@ def test_api_hub_plugin_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - (ApiHubPluginClient, transports.ApiHubPluginGrpcTransport, "grpc", "true"), - ( - ApiHubPluginAsyncClient, - transports.ApiHubPluginGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - (ApiHubPluginClient, transports.ApiHubPluginGrpcTransport, "grpc", "false"), - ( - ApiHubPluginAsyncClient, - transports.ApiHubPluginGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest", "true"), (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest", "false"), ], @@ -636,11 +597,6 @@ def test_api_hub_plugin_client_client_options( "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubPluginClient), ) -@mock.patch.object( - ApiHubPluginAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubPluginAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_api_hub_plugin_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ -744,15 +700,10 @@ def test_api_hub_plugin_client_mtls_env_auto( ) -@pytest.mark.parametrize("client_class", [ApiHubPluginClient, ApiHubPluginAsyncClient]) 
+@pytest.mark.parametrize("client_class", [ApiHubPluginClient]) @mock.patch.object( ApiHubPluginClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ApiHubPluginClient) ) -@mock.patch.object( - ApiHubPluginAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ApiHubPluginAsyncClient), -) def test_api_hub_plugin_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() @@ -844,17 +795,12 @@ def test_api_hub_plugin_client_get_mtls_endpoint_and_cert_source(client_class): ) -@pytest.mark.parametrize("client_class", [ApiHubPluginClient, ApiHubPluginAsyncClient]) +@pytest.mark.parametrize("client_class", [ApiHubPluginClient]) @mock.patch.object( ApiHubPluginClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubPluginClient), ) -@mock.patch.object( - ApiHubPluginAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubPluginAsyncClient), -) def test_api_hub_plugin_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" @@ -931,12 +877,6 @@ def test_api_hub_plugin_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubPluginClient, transports.ApiHubPluginGrpcTransport, "grpc"), - ( - ApiHubPluginAsyncClient, - transports.ApiHubPluginGrpcAsyncIOTransport, - "grpc_asyncio", - ), (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest"), ], ) @@ -968,18 +908,6 @@ def test_api_hub_plugin_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - ( - ApiHubPluginClient, - transports.ApiHubPluginGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - ApiHubPluginAsyncClient, - transports.ApiHubPluginGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest", None), ], ) @@ -1007,94 +935,6 @@ def 
test_api_hub_plugin_client_client_options_credentials_file( ) -def test_api_hub_plugin_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = ApiHubPluginClient(client_options={"api_endpoint": "squid.clam.whelk"}) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - ApiHubPluginClient, - transports.ApiHubPluginGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - ApiHubPluginAsyncClient, - transports.ApiHubPluginGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_api_hub_plugin_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1102,32 +942,36 @@ def test_api_hub_plugin_client_create_channel_credentials_file( dict, ], ) -def test_get_plugin(request_type, transport: str = "grpc"): +def test_get_plugin_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin( + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = plugin_service.Plugin( name="name_value", display_name="display_name_value", description="description_value", state=plugin_service.Plugin.State.ENABLED, ) - response = client.get_plugin(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = plugin_service.GetPluginRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_plugin(request) # Establish that the response is the type that we expect. assert isinstance(response, plugin_service.Plugin) @@ -1137,60 +981,13 @@ def test_get_plugin(request_type, transport: str = "grpc"): assert response.state == plugin_service.Plugin.State.ENABLED -def test_get_plugin_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_plugin() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.GetPluginRequest() - - -def test_get_plugin_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = plugin_service.GetPluginRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_plugin(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.GetPluginRequest( - name="name_value", - ) - - -def test_get_plugin_use_cached_wrapped_rpc(): +def test_get_plugin_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1206,6 +1003,7 @@ def test_get_plugin_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[client._transport.get_plugin] = mock_rpc + request = {} client.get_plugin(request) @@ -1219,260 +1017,230 @@ def test_get_plugin_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_plugin_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - ) - response = await client.get_plugin() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.GetPluginRequest() - - -@pytest.mark.asyncio -async def test_get_plugin_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_plugin - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - 
client._client._transport.get_plugin - ] = mock_rpc - - request = {} - await client.get_plugin(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_plugin(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - +def test_get_plugin_rest_required_fields(request_type=plugin_service.GetPluginRequest): + transport_class = transports.ApiHubPluginRestTransport -@pytest.mark.asyncio -async def test_get_plugin_async( - transport: str = "grpc_asyncio", request_type=plugin_service.GetPluginRequest -): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - ) - response = await client.get_plugin(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = plugin_service.GetPluginRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. - assert isinstance(response, plugin_service.Plugin) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.state == plugin_service.Plugin.State.ENABLED + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_get_plugin_async_from_dict(): - await test_get_plugin_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_get_plugin_field_headers(): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = plugin_service.GetPluginRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - call.return_value = plugin_service.Plugin() - client.get_plugin(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_plugin_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Designate an appropriate value for the returned response. + return_value = plugin_service.Plugin() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = plugin_service.GetPluginRequest() + response_value = Response() + response_value.status_code = 200 - request.name = "name_value" + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin() - ) - await client.get_plugin(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_plugin(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_plugin_flattened(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_plugin_rest_unset_required_fields(): + transport = transports.ApiHubPluginRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_plugin( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + unset_fields = transport.get_plugin._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_plugin_flattened_error(): - client = ApiHubPluginClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_plugin_rest_interceptors(null_interceptor): + transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubPluginRestInterceptor(), ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): + client = ApiHubPluginClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubPluginRestInterceptor, "post_get_plugin" + ) as post, mock.patch.object( + transports.ApiHubPluginRestInterceptor, "pre_get_plugin" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = plugin_service.GetPluginRequest.pb( + plugin_service.GetPluginRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = plugin_service.Plugin.to_json( + plugin_service.Plugin() + ) + + request = plugin_service.GetPluginRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = plugin_service.Plugin() + client.get_plugin( - plugin_service.GetPluginRequest(), - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) + 
pre.assert_called_once() + post.assert_called_once() + -@pytest.mark.asyncio -async def test_get_plugin_flattened_async(): - client = ApiHubPluginAsyncClient( +def test_get_plugin_rest_bad_request( + transport: str = "rest", request_type=plugin_service.GetPluginRequest +): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + request = request_type(**request_init) - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_plugin( + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_plugin(request) + + +def test_get_plugin_rest_flattened(): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = plugin_service.Plugin() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_plugin(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/plugins/*}" % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_plugin_flattened_error_async(): - client = ApiHubPluginAsyncClient( +def test_get_plugin_rest_flattened_error(transport: str = "rest"): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_plugin( + client.get_plugin( plugin_service.GetPluginRequest(), name="name_value", ) +def test_get_plugin_rest_error(): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1480,32 +1248,36 @@ async def test_get_plugin_flattened_error_async(): dict, ], ) -def test_enable_plugin(request_type, transport: str = "grpc"): +def test_enable_plugin_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = plugin_service.Plugin( name="name_value", display_name="display_name_value", description="description_value", state=plugin_service.Plugin.State.ENABLED, ) - response = client.enable_plugin(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = plugin_service.EnablePluginRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.enable_plugin(request) # Establish that the response is the type that we expect. assert isinstance(response, plugin_service.Plugin) @@ -1515,60 +1287,13 @@ def test_enable_plugin(request_type, transport: str = "grpc"): assert response.state == plugin_service.Plugin.State.ENABLED -def test_enable_plugin_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.enable_plugin() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.EnablePluginRequest() - - -def test_enable_plugin_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = plugin_service.EnablePluginRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.enable_plugin(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.EnablePluginRequest( - name="name_value", - ) - - -def test_enable_plugin_use_cached_wrapped_rpc(): +def test_enable_plugin_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1584,6 +1309,7 @@ def test_enable_plugin_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.enable_plugin] = mock_rpc + request = {} client.enable_plugin(request) @@ -1597,262 +1323,234 @@ def test_enable_plugin_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_enable_plugin_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - ) - response = await client.enable_plugin() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.EnablePluginRequest() - - -@pytest.mark.asyncio -async def test_enable_plugin_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_enable_plugin_rest_required_fields( + request_type=plugin_service.EnablePluginRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubPluginRestTransport - # Ensure method has been cached - assert ( - client._client._transport.enable_plugin - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - 
client._client._transport.enable_plugin - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.enable_plugin(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.enable_plugin(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_enable_plugin_async( - transport: str = "grpc_asyncio", request_type=plugin_service.EnablePluginRequest -): - client = ApiHubPluginAsyncClient( + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - ) - response = await client.enable_plugin(request) + # Designate an appropriate value for the returned response. + return_value = plugin_service.Plugin() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = plugin_service.EnablePluginRequest() - assert args[0] == request + response_value = Response() + response_value.status_code = 200 - # Establish that the response is the type that we expect. 
- assert isinstance(response, plugin_service.Plugin) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.state == plugin_service.Plugin.State.ENABLED + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_enable_plugin_async_from_dict(): - await test_enable_plugin_async(request_type=dict) + response = client.enable_plugin(request) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_enable_plugin_field_headers(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = plugin_service.EnablePluginRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - call.return_value = plugin_service.Plugin() - client.enable_plugin(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_enable_plugin_rest_unset_required_fields(): + transport = transports.ApiHubPluginRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.enable_plugin._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_enable_plugin_field_headers_async(): - client = ApiHubPluginAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enable_plugin_rest_interceptors(null_interceptor): + transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubPluginRestInterceptor(), ) + client = ApiHubPluginClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubPluginRestInterceptor, "post_enable_plugin" + ) as post, mock.patch.object( + transports.ApiHubPluginRestInterceptor, "pre_enable_plugin" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = plugin_service.EnablePluginRequest.pb( + plugin_service.EnablePluginRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = plugin_service.EnablePluginRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = plugin_service.Plugin.to_json( plugin_service.Plugin() ) - await client.enable_plugin(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - -def test_enable_plugin_flattened(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = plugin_service.EnablePluginRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = plugin_service.Plugin() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.enable_plugin( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_enable_plugin_flattened_error(): +def test_enable_plugin_rest_bad_request( + transport: str = "rest", request_type=plugin_service.EnablePluginRequest +): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.enable_plugin( - plugin_service.EnablePluginRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enable_plugin(request) -@pytest.mark.asyncio -async def test_enable_plugin_flattened_async(): - client = ApiHubPluginAsyncClient( +def test_enable_plugin_rest_flattened(): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = plugin_service.Plugin() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.enable_plugin( + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.enable_plugin(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/plugins/*}:enable" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_enable_plugin_flattened_error_async(): - client = ApiHubPluginAsyncClient( +def test_enable_plugin_rest_flattened_error(transport: str = "rest"): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.enable_plugin( + client.enable_plugin( plugin_service.EnablePluginRequest(), name="name_value", ) +def test_enable_plugin_rest_error(): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1860,32 +1558,36 @@ async def test_enable_plugin_flattened_error_async(): dict, ], ) -def test_disable_plugin(request_type, transport: str = "grpc"): +def test_disable_plugin_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = plugin_service.Plugin( name="name_value", display_name="display_name_value", description="description_value", state=plugin_service.Plugin.State.ENABLED, ) - response = client.disable_plugin(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = plugin_service.DisablePluginRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.disable_plugin(request) # Establish that the response is the type that we expect. assert isinstance(response, plugin_service.Plugin) @@ -1895,60 +1597,13 @@ def test_disable_plugin(request_type, transport: str = "grpc"): assert response.state == plugin_service.Plugin.State.ENABLED -def test_disable_plugin_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.disable_plugin() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.DisablePluginRequest() - - -def test_disable_plugin_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = plugin_service.DisablePluginRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.disable_plugin(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.DisablePluginRequest( - name="name_value", - ) - - -def test_disable_plugin_use_cached_wrapped_rpc(): +def test_disable_plugin_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1964,6 +1619,7 @@ def test_disable_plugin_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.disable_plugin] = mock_rpc + request = {} client.disable_plugin(request) @@ -1977,380 +1633,44 @@ def test_disable_plugin_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_disable_plugin_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - ) - response = await client.disable_plugin() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.DisablePluginRequest() - - -@pytest.mark.asyncio -async def test_disable_plugin_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_disable_plugin_rest_required_fields( + request_type=plugin_service.DisablePluginRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubPluginRestTransport - # Ensure method has been cached - assert ( - client._client._transport.disable_plugin - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - 
client._client._transport.disable_plugin - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.disable_plugin(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.disable_plugin(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_disable_plugin_async( - transport: str = "grpc_asyncio", request_type=plugin_service.DisablePluginRequest -): - client = ApiHubPluginAsyncClient( + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - ) - response = await client.disable_plugin(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = plugin_service.DisablePluginRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, plugin_service.Plugin) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.state == plugin_service.Plugin.State.ENABLED - - -@pytest.mark.asyncio -async def test_disable_plugin_async_from_dict(): - await test_disable_plugin_async(request_type=dict) - - -def test_disable_plugin_field_headers(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = plugin_service.DisablePluginRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - call.return_value = plugin_service.Plugin() - client.disable_plugin(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_disable_plugin_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = plugin_service.DisablePluginRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin() - ) - await client.disable_plugin(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_disable_plugin_flattened(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.disable_plugin( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_disable_plugin_flattened_error(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.disable_plugin( - plugin_service.DisablePluginRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_disable_plugin_flattened_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.disable_plugin( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_disable_plugin_flattened_error_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.disable_plugin( - plugin_service.DisablePluginRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - plugin_service.GetPluginRequest, - dict, - ], -) -def test_get_plugin_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_plugin(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, plugin_service.Plugin) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.state == plugin_service.Plugin.State.ENABLED - - -def test_get_plugin_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_plugin in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_plugin] = mock_rpc - - request = {} - client.get_plugin(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_plugin(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_plugin_rest_required_fields(request_type=plugin_service.GetPluginRequest): - transport_class = transports.ApiHubPluginRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_plugin._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_plugin._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + request = request_type(**request_init) # Designate an appropriate value for the returned response. 
return_value = plugin_service.Plugin() @@ -2365,9 +1685,10 @@ def test_get_plugin_rest_required_fields(request_type=plugin_service.GetPluginRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -2380,24 +1701,24 @@ def test_get_plugin_rest_required_fields(request_type=plugin_service.GetPluginRe response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_plugin(request) + response = client.disable_plugin(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_plugin_rest_unset_required_fields(): +def test_disable_plugin_rest_unset_required_fields(): transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_plugin._get_unset_required_fields({}) + unset_fields = transport.disable_plugin._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_plugin_rest_interceptors(null_interceptor): +def test_disable_plugin_rest_interceptors(null_interceptor): transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2410,14 +1731,14 @@ def test_get_plugin_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ApiHubPluginRestInterceptor, "post_get_plugin" + transports.ApiHubPluginRestInterceptor, "post_disable_plugin" ) as post, mock.patch.object( - transports.ApiHubPluginRestInterceptor, "pre_get_plugin" + transports.ApiHubPluginRestInterceptor, "pre_disable_plugin" ) as pre: 
pre.assert_not_called() post.assert_not_called() - pb_message = plugin_service.GetPluginRequest.pb( - plugin_service.GetPluginRequest() + pb_message = plugin_service.DisablePluginRequest.pb( + plugin_service.DisablePluginRequest() ) transcode.return_value = { "method": "post", @@ -2433,7 +1754,7 @@ def test_get_plugin_rest_interceptors(null_interceptor): plugin_service.Plugin() ) - request = plugin_service.GetPluginRequest() + request = plugin_service.DisablePluginRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -2441,7 +1762,7 @@ def test_get_plugin_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = plugin_service.Plugin() - client.get_plugin( + client.disable_plugin( request, metadata=[ ("key", "val"), @@ -2453,8 +1774,8 @@ def test_get_plugin_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_plugin_rest_bad_request( - transport: str = "rest", request_type=plugin_service.GetPluginRequest +def test_disable_plugin_rest_bad_request( + transport: str = "rest", request_type=plugin_service.DisablePluginRequest ): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2474,10 +1795,10 @@ def test_get_plugin_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_plugin(request) + client.disable_plugin(request) -def test_get_plugin_rest_flattened(): +def test_disable_plugin_rest_flattened(): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2506,19 +1827,20 @@ def test_get_plugin_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_plugin(**mock_args) + client.disable_plugin(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/plugins/*}" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/plugins/*}:disable" + % client.transport._host, args[1], ) -def test_get_plugin_rest_flattened_error(transport: str = "rest"): +def test_disable_plugin_rest_flattened_error(transport: str = "rest"): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2527,2566 +1849,806 @@ def test_get_plugin_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_plugin( - plugin_service.GetPluginRequest(), + client.disable_plugin( + plugin_service.DisablePluginRequest(), name="name_value", ) -def test_get_plugin_rest_error(): +def test_disable_plugin_rest_error(): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize( - "request_type", - [ - plugin_service.EnablePluginRequest, - dict, - ], -) -def test_enable_plugin_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.enable_plugin(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, plugin_service.Plugin) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.state == plugin_service.Plugin.State.ENABLED - - -def test_enable_plugin_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.enable_plugin in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.enable_plugin] = mock_rpc - - request = {} - client.enable_plugin(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.enable_plugin(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_enable_plugin_rest_required_fields( - request_type=plugin_service.EnablePluginRequest, -): - transport_class = transports.ApiHubPluginRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).enable_plugin._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).enable_plugin._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = plugin_service.Plugin() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.enable_plugin(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_enable_plugin_rest_unset_required_fields(): - transport = transports.ApiHubPluginRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.enable_plugin._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_enable_plugin_rest_interceptors(null_interceptor): +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubPluginRestInterceptor(), - ) - client = ApiHubPluginClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubPluginRestInterceptor, "post_enable_plugin" - ) as post, mock.patch.object( - transports.ApiHubPluginRestInterceptor, "pre_enable_plugin" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = plugin_service.EnablePluginRequest.pb( - plugin_service.EnablePluginRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = plugin_service.Plugin.to_json( - plugin_service.Plugin() - ) - - request = plugin_service.EnablePluginRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = plugin_service.Plugin() - - client.enable_plugin( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_enable_plugin_rest_bad_request( - transport: str = "rest", request_type=plugin_service.EnablePluginRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.enable_plugin(request) - - -def test_enable_plugin_rest_flattened(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = plugin_service.Plugin() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.enable_plugin(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/plugins/*}:enable" - % client.transport._host, - args[1], - ) - - -def test_enable_plugin_rest_flattened_error(transport: str = "rest"): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
with pytest.raises(ValueError): - client.enable_plugin( - plugin_service.EnablePluginRequest(), - name="name_value", - ) - - -def test_enable_plugin_rest_error(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - plugin_service.DisablePluginRequest, - dict, - ], -) -def test_disable_plugin_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.disable_plugin(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, plugin_service.Plugin) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.state == plugin_service.Plugin.State.ENABLED - - -def test_disable_plugin_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.disable_plugin in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.disable_plugin] = mock_rpc - - request = {} - client.disable_plugin(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.disable_plugin(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_disable_plugin_rest_required_fields( - request_type=plugin_service.DisablePluginRequest, -): - transport_class = transports.ApiHubPluginRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).disable_plugin._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).disable_plugin._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = plugin_service.Plugin() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.disable_plugin(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_disable_plugin_rest_unset_required_fields(): - transport = transports.ApiHubPluginRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.disable_plugin._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_disable_plugin_rest_interceptors(null_interceptor): - transport = transports.ApiHubPluginRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubPluginRestInterceptor(), - ) - client = ApiHubPluginClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubPluginRestInterceptor, "post_disable_plugin" - ) as post, mock.patch.object( - transports.ApiHubPluginRestInterceptor, "pre_disable_plugin" - ) as pre: - pre.assert_not_called() - 
post.assert_not_called() - pb_message = plugin_service.DisablePluginRequest.pb( - plugin_service.DisablePluginRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = plugin_service.Plugin.to_json( - plugin_service.Plugin() - ) - - request = plugin_service.DisablePluginRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = plugin_service.Plugin() - - client.disable_plugin( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_disable_plugin_rest_bad_request( - transport: str = "rest", request_type=plugin_service.DisablePluginRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.disable_plugin(request) - - -def test_disable_plugin_rest_flattened(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = plugin_service.Plugin() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.disable_plugin(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/plugins/*}:disable" - % client.transport._host, - args[1], - ) - - -def test_disable_plugin_rest_flattened_error(transport: str = "rest"): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.disable_plugin( - plugin_service.DisablePluginRequest(), - name="name_value", - ) - - -def test_disable_plugin_rest_error(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.ApiHubPluginGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ApiHubPluginGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubPluginClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ApiHubPluginGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ApiHubPluginClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ApiHubPluginClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ApiHubPluginGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubPluginClient( - client_options={"scopes": ["1", "2"]}, transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ApiHubPluginGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ApiHubPluginClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.ApiHubPluginGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ApiHubPluginGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubPluginGrpcTransport, - transports.ApiHubPluginGrpcAsyncIOTransport, - transports.ApiHubPluginRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = ApiHubPluginClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ApiHubPluginGrpcTransport, - ) - - -def test_api_hub_plugin_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ApiHubPluginTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_api_hub_plugin_base_transport(): - # Instantiate the base transport. 
- with mock.patch( - "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.ApiHubPluginTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "get_plugin", - "enable_plugin", - "disable_plugin", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_api_hub_plugin_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ApiHubPluginTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_api_hub_plugin_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ApiHubPluginTransport() - adc.assert_called_once() - - -def test_api_hub_plugin_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ApiHubPluginClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubPluginGrpcTransport, - transports.ApiHubPluginGrpcAsyncIOTransport, - ], -) -def test_api_hub_plugin_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubPluginGrpcTransport, - transports.ApiHubPluginGrpcAsyncIOTransport, - transports.ApiHubPluginRestTransport, - ], -) -def test_api_hub_plugin_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ApiHubPluginGrpcTransport, grpc_helpers), - (transports.ApiHubPluginGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_api_hub_plugin_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [transports.ApiHubPluginGrpcTransport, transports.ApiHubPluginGrpcAsyncIOTransport], -) -def test_api_hub_plugin_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_api_hub_plugin_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.ApiHubPluginRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_api_hub_plugin_host_no_port(transport_name): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_api_hub_plugin_host_with_port(transport_name): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com:8000" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", 
- ], -) -def test_api_hub_plugin_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = ApiHubPluginClient( - credentials=creds1, - transport=transport_name, - ) - client2 = ApiHubPluginClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.get_plugin._session - session2 = client2.transport.get_plugin._session - assert session1 != session2 - session1 = client1.transport.enable_plugin._session - session2 = client2.transport.enable_plugin._session - assert session1 != session2 - session1 = client1.transport.disable_plugin._session - session2 = client2.transport.disable_plugin._session - assert session1 != session2 - - -def test_api_hub_plugin_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ApiHubPluginGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_api_hub_plugin_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ApiHubPluginGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [transports.ApiHubPluginGrpcTransport, transports.ApiHubPluginGrpcAsyncIOTransport], -) -def test_api_hub_plugin_transport_channel_mtls_with_client_cert_source(transport_class): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [transports.ApiHubPluginGrpcTransport, transports.ApiHubPluginGrpcAsyncIOTransport], -) -def test_api_hub_plugin_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_attribute_path(): - project = "squid" - location = "clam" - attribute = "whelk" - expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( - project=project, - location=location, - attribute=attribute, - ) - actual = ApiHubPluginClient.attribute_path(project, location, attribute) - assert expected == actual - - -def test_parse_attribute_path(): - expected = { - "project": "octopus", - "location": "oyster", - "attribute": "nudibranch", - } - path = ApiHubPluginClient.attribute_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubPluginClient.parse_attribute_path(path) - assert expected == actual - - -def test_plugin_path(): - project = "cuttlefish" - location = "mussel" - plugin = "winkle" - expected = "projects/{project}/locations/{location}/plugins/{plugin}".format( - project=project, - location=location, - plugin=plugin, - ) - actual = ApiHubPluginClient.plugin_path(project, location, plugin) - assert expected == actual - - -def test_parse_plugin_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "plugin": "abalone", - } - path = ApiHubPluginClient.plugin_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubPluginClient.parse_plugin_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = ApiHubPluginClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = ApiHubPluginClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubPluginClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = ApiHubPluginClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = ApiHubPluginClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubPluginClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = ApiHubPluginClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = ApiHubPluginClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubPluginClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format( - project=project, - ) - actual = ApiHubPluginClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = ApiHubPluginClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubPluginClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = ApiHubPluginClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = ApiHubPluginClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubPluginClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.ApiHubPluginTransport, "_prep_wrapped_messages" - ) as prep: - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.ApiHubPluginTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = ApiHubPluginClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - + ) -def test_delete_operation(transport: str = "grpc"): - client = ApiHubPluginClient( + # It is an error to provide a credentials file and a transport instance. + transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + with pytest.raises(ValueError): + client = ApiHubPluginClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = operations_pb2.DeleteOperationRequest() + # It is an error to provide an api_key and a transport instance. + transport = transports.ApiHubPluginRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ApiHubPluginClient( + client_options=options, + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ApiHubPluginClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) - # Establish that the response is the type that we expect. - assert response is None + # It is an error to provide scopes and a transport instance. + transport = transports.ApiHubPluginRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ApiHubPluginClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubPluginAsyncClient( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + client = ApiHubPluginClient(transport=transport) + assert client.transport is transport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. - assert response is None +@pytest.mark.parametrize( + "transport_class", + [ + transports.ApiHubPluginRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() -def test_delete_operation_field_headers(): - client = ApiHubPluginClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ApiHubPluginClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), ) + assert transport.kind == transport_name - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_api_hub_plugin_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ApiHubPluginTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_api_hub_plugin_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ApiHubPluginTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_plugin", + "enable_plugin", + "disable_plugin", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.DeleteOperationRequest() - request.name = "locations" + with pytest.raises(NotImplementedError): + transport.close() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + +def test_api_hub_plugin_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubPluginTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) -def test_delete_operation_from_dict(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } +def test_api_hub_plugin_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubPluginTransport() + adc.assert_called_once() + + +def test_api_hub_plugin_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ApiHubPluginClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } +def test_api_hub_plugin_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ApiHubPluginRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) - call.assert_called() + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -def test_cancel_operation(transport: str = "grpc"): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_plugin_host_no_port(transport_name): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubPluginAsyncClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_plugin_host_with_port(transport_name): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. 
- assert response is None +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_plugin_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ApiHubPluginClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ApiHubPluginClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_plugin._session + session2 = client2.transport.get_plugin._session + assert session1 != session2 + session1 = client1.transport.enable_plugin._session + session2 = client2.transport.enable_plugin._session + assert session1 != session2 + session1 = client1.transport.disable_plugin._session + session2 = client2.transport.disable_plugin._session + assert session1 != session2 -def test_cancel_operation_field_headers(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_attribute_path(): + project = "squid" + location = "clam" + attribute = "whelk" + expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( + project=project, + location=location, + attribute=attribute, ) + actual = ApiHubPluginClient.attribute_path(project, location, attribute) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_attribute_path(): + expected = { + "project": "octopus", + "location": "oyster", + "attribute": "nudibranch", + } + path = ApiHubPluginClient.attribute_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubPluginClient.parse_attribute_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_plugin_path(): + project = "cuttlefish" + location = "mussel" + plugin = "winkle" + expected = "projects/{project}/locations/{location}/plugins/{plugin}".format( + project=project, + location=location, + plugin=plugin, ) + actual = ApiHubPluginClient.plugin_path(project, location, plugin) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_plugin_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "plugin": "abalone", + } + path = ApiHubPluginClient.plugin_path(**expected) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubPluginClient.parse_plugin_path(path) + assert expected == actual -def test_cancel_operation_from_dict(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + actual = ApiHubPluginClient.common_billing_account_path(billing_account) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ApiHubPluginClient.common_billing_account_path(**expected) + # Check that the path construction is reversible. 
+ actual = ApiHubPluginClient.parse_common_billing_account_path(path) + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, ) + actual = ApiHubPluginClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ApiHubPluginClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = ApiHubPluginClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = ApiHubPluginClient.common_organization_path(organization) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ApiHubPluginClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = ApiHubPluginClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, ) + actual = ApiHubPluginClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ApiHubPluginClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubPluginClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = ApiHubPluginClient.common_location_path(project, location) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ApiHubPluginClient.common_location_path(**expected) + # Check that the path construction is reversible. + actual = ApiHubPluginClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() + with mock.patch.object( + transports.ApiHubPluginTransport, "_prep_wrapped_messages" + ) as prep: + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - response = await client.get_operation( - request={ - "name": "locations", - } + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ApiHubPluginTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ApiHubPluginClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = ApiHubPluginAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. 
+ assert response is None -def test_list_locations_from_dict(): +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_get_location(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_operation(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, locations_pb2.Location) + assert response is None -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = ApiHubPluginAsyncClient( +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -def test_get_location_field_headers(): - client = ApiHubPluginClient(credentials=ga_credentials.AnonymousCredentials()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + response = client.get_operation(request) + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = ApiHubPluginAsyncClient(credentials=ga_credentials.AnonymousCredentials()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -5104,7 +2666,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = ApiHubPluginClient( @@ -5121,8 +2682,7 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (ApiHubPluginClient, transports.ApiHubPluginGrpcTransport), - (ApiHubPluginAsyncClient, transports.ApiHubPluginGrpcAsyncIOTransport), + (ApiHubPluginClient, transports.ApiHubPluginRestTransport), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py index e951e616c819..15813aa93505 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py @@ -47,7 +47,6 @@ from requests.sessions import Session from google.cloud.apihub_v1.services.host_project_registration_service import ( - HostProjectRegistrationServiceAsyncClient, HostProjectRegistrationServiceClient, pagers, transports, @@ -230,11 +229,6 @@ def test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(HostProjectRegistrationServiceClient), ) -@mock.patch.object( - HostProjectRegistrationServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(HostProjectRegistrationServiceAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -333,11 +327,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( 
"client_class,transport_class,transport_name", [ - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - ), ( HostProjectRegistrationServiceClient, transports.HostProjectRegistrationServiceRestTransport, @@ -421,8 +410,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (HostProjectRegistrationServiceClient, "grpc"), - (HostProjectRegistrationServiceAsyncClient, "grpc_asyncio"), (HostProjectRegistrationServiceClient, "rest"), ], ) @@ -449,8 +436,6 @@ def test_host_project_registration_service_client_from_service_account_info( @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.HostProjectRegistrationServiceGrpcTransport, "grpc"), - (transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), (transports.HostProjectRegistrationServiceRestTransport, "rest"), ], ) @@ -475,8 +460,6 @@ def test_host_project_registration_service_client_service_account_always_use_jwt @pytest.mark.parametrize( "client_class,transport_name", [ - (HostProjectRegistrationServiceClient, "grpc"), - (HostProjectRegistrationServiceAsyncClient, "grpc_asyncio"), (HostProjectRegistrationServiceClient, "rest"), ], ) @@ -510,28 +493,17 @@ def test_host_project_registration_service_client_from_service_account_file( def test_host_project_registration_service_client_get_transport_class(): transport = HostProjectRegistrationServiceClient.get_transport_class() available_transports = [ - transports.HostProjectRegistrationServiceGrpcTransport, transports.HostProjectRegistrationServiceRestTransport, ] assert transport in available_transports - transport = HostProjectRegistrationServiceClient.get_transport_class("grpc") - assert transport == transports.HostProjectRegistrationServiceGrpcTransport + transport = HostProjectRegistrationServiceClient.get_transport_class("rest") + assert transport == 
transports.HostProjectRegistrationServiceRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), ( HostProjectRegistrationServiceClient, transports.HostProjectRegistrationServiceRestTransport, @@ -544,11 +516,6 @@ def test_host_project_registration_service_client_get_transport_class(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(HostProjectRegistrationServiceClient), ) -@mock.patch.object( - HostProjectRegistrationServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(HostProjectRegistrationServiceAsyncClient), -) def test_host_project_registration_service_client_client_options( client_class, transport_class, transport_name ): @@ -686,30 +653,6 @@ def test_host_project_registration_service_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - "true", - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - "false", - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), ( HostProjectRegistrationServiceClient, transports.HostProjectRegistrationServiceRestTransport, @@ -729,11 +672,6 @@ def test_host_project_registration_service_client_client_options( "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(HostProjectRegistrationServiceClient), ) 
-@mock.patch.object( - HostProjectRegistrationServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(HostProjectRegistrationServiceAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_host_project_registration_service_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ -837,20 +775,12 @@ def test_host_project_registration_service_client_mtls_env_auto( ) -@pytest.mark.parametrize( - "client_class", - [HostProjectRegistrationServiceClient, HostProjectRegistrationServiceAsyncClient], -) +@pytest.mark.parametrize("client_class", [HostProjectRegistrationServiceClient]) @mock.patch.object( HostProjectRegistrationServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(HostProjectRegistrationServiceClient), ) -@mock.patch.object( - HostProjectRegistrationServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(HostProjectRegistrationServiceAsyncClient), -) def test_host_project_registration_service_client_get_mtls_endpoint_and_cert_source( client_class, ): @@ -944,20 +874,12 @@ def test_host_project_registration_service_client_get_mtls_endpoint_and_cert_sou ) -@pytest.mark.parametrize( - "client_class", - [HostProjectRegistrationServiceClient, HostProjectRegistrationServiceAsyncClient], -) +@pytest.mark.parametrize("client_class", [HostProjectRegistrationServiceClient]) @mock.patch.object( HostProjectRegistrationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(HostProjectRegistrationServiceClient), ) -@mock.patch.object( - HostProjectRegistrationServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(HostProjectRegistrationServiceAsyncClient), -) def test_host_project_registration_service_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" @@ -1038,16 +960,6 @@ def 
test_host_project_registration_service_client_client_api_endpoint(client_cla @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), ( HostProjectRegistrationServiceClient, transports.HostProjectRegistrationServiceRestTransport, @@ -1083,18 +995,6 @@ def test_host_project_registration_service_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), ( HostProjectRegistrationServiceClient, transports.HostProjectRegistrationServiceRestTransport, @@ -1127,96 +1027,6 @@ def test_host_project_registration_service_client_client_options_credentials_fil ) -def test_host_project_registration_service_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = HostProjectRegistrationServiceClient( - client_options={"api_endpoint": "squid.clam.whelk"} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - 
HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_host_project_registration_service_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1224,34 +1034,114 @@ def test_host_project_registration_service_client_create_channel_credentials_fil dict, ], ) -def test_create_host_project_registration(request_type, transport: str = "grpc"): +def test_create_host_project_registration_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["host_project_registration"] = { + "name": "name_value", + "gcp_project": "gcp_project_value", + "create_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = host_project_registration_service.HostProjectRegistration( + # Determine if the message type is proto-plus or protobuf + test_field = host_project_registration_service.CreateHostProjectRegistrationRequest.meta.fields[ + "host_project_registration" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "host_project_registration" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if 
isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["host_project_registration"][field]) + ): + del request_init["host_project_registration"][field][i][subfield] + else: + del request_init["host_project_registration"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration( name="name_value", gcp_project="gcp_project_value", ) - response = client.create_host_project_registration(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - host_project_registration_service.CreateHostProjectRegistrationRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value ) - assert args[0] == request + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_host_project_registration(request) # Establish that the response is the type that we expect. assert isinstance( @@ -1261,71 +1151,13 @@ def test_create_host_project_registration(request_type, transport: str = "grpc") assert response.gcp_project == "gcp_project_value" -def test_create_host_project_registration_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_host_project_registration() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == host_project_registration_service.CreateHostProjectRegistrationRequest() - ) - - -def test_create_host_project_registration_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = host_project_registration_service.CreateHostProjectRegistrationRequest( - parent="parent_value", - host_project_registration_id="host_project_registration_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_host_project_registration(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == host_project_registration_service.CreateHostProjectRegistrationRequest( - parent="parent_value", - host_project_registration_id="host_project_registration_id_value", - ) - - -def test_create_host_project_registration_use_cached_wrapped_rpc(): +def test_create_host_project_registration_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1346,6 +1178,7 @@ def test_create_host_project_registration_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_host_project_registration ] = mock_rpc + request = {} client.create_host_project_registration(request) @@ -1359,231 +1192,268 @@ def test_create_host_project_registration_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 
-@pytest.mark.asyncio -async def test_create_host_project_registration_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_create_host_project_registration_rest_required_fields( + request_type=host_project_registration_service.CreateHostProjectRegistrationRequest, +): + transport_class = transports.HostProjectRegistrationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["host_project_registration_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration( - name="name_value", - gcp_project="gcp_project_value", - ) - ) - response = await client.create_host_project_registration() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == host_project_registration_service.CreateHostProjectRegistrationRequest() - ) + # verify fields with default values are dropped + assert "hostProjectRegistrationId" not in jsonified_request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_host_project_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_create_host_project_registration_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present + assert "hostProjectRegistrationId" in jsonified_request + assert ( + jsonified_request["hostProjectRegistrationId"] + == request_init["host_project_registration_id"] + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" + jsonified_request[ + "hostProjectRegistrationId" + ] = "host_project_registration_id_value" - # Ensure method has been cached - assert ( - client._client._transport.create_host_project_registration - in client._client._transport._wrapped_methods - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).create_host_project_registration._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("host_project_registration_id",)) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_host_project_registration - ] = mock_rpc + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "hostProjectRegistrationId" in jsonified_request + assert ( + jsonified_request["hostProjectRegistrationId"] + == "host_project_registration_id_value" + ) - request = {} - await client.create_host_project_registration(request) + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - await client.create_host_project_registration(request) + response_value = Response() + response_value.status_code = 200 - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_create_host_project_registration_async( - transport: str = "grpc_asyncio", - request_type=host_project_registration_service.CreateHostProjectRegistrationRequest, -): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + response = client.create_host_project_registration(request) + + expected_params = [ + ( + "hostProjectRegistrationId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_host_project_registration_rest_unset_required_fields(): + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + unset_fields = ( + transport.create_host_project_registration._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("hostProjectRegistrationId",)) + & set( + ( + "parent", + "hostProjectRegistrationId", + "hostProjectRegistration", + ) + ) + ) - # Mock the actual call within the gRPC stub, and fake the request. + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_host_project_registration_rest_interceptors(null_interceptor): + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.HostProjectRegistrationServiceRestInterceptor(), + ) + client = HostProjectRegistrationServiceClient(transport=transport) with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration( - name="name_value", - gcp_project="gcp_project_value", + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "post_create_host_project_registration", + ) as post, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "pre_create_host_project_registration", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + host_project_registration_service.CreateHostProjectRegistrationRequest.pb( + host_project_registration_service.CreateHostProjectRegistrationRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = ( + host_project_registration_service.HostProjectRegistration.to_json( + host_project_registration_service.HostProjectRegistration() ) ) - response = await client.create_host_project_registration(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] request = ( host_project_registration_service.CreateHostProjectRegistrationRequest() ) - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance( - response, host_project_registration_service.HostProjectRegistration - ) - assert response.name == "name_value" - assert response.gcp_project == "gcp_project_value" + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = host_project_registration_service.HostProjectRegistration() + client.create_host_project_registration( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) -@pytest.mark.asyncio -async def test_create_host_project_registration_async_from_dict(): - await test_create_host_project_registration_async(request_type=dict) + pre.assert_called_once() + post.assert_called_once() -def test_create_host_project_registration_field_headers(): +def test_create_host_project_registration_rest_bad_request( + transport: str = "rest", + request_type=host_project_registration_service.CreateHostProjectRegistrationRequest, +): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = host_project_registration_service.CreateHostProjectRegistrationRequest() - - request.parent = "parent_value" + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - call.return_value = host_project_registration_service.HostProjectRegistration() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value client.create_host_project_registration(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - -@pytest.mark.asyncio -async def test_create_host_project_registration_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( +def test_create_host_project_registration_rest_flattened(): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = host_project_registration_service.CreateHostProjectRegistrationRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration() - ) - await client.create_host_project_registration(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration() -def test_create_host_project_registration_flattened(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = host_project_registration_service.HostProjectRegistration() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.create_host_project_registration( + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", host_project_registration=host_project_registration_service.HostProjectRegistration( name="name_value" ), host_project_registration_id="host_project_registration_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_host_project_registration(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].host_project_registration - mock_val = host_project_registration_service.HostProjectRegistration( - name="name_value" + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/hostProjectRegistrations" + % client.transport._host, + args[1], ) - assert arg == mock_val - arg = args[0].host_project_registration_id - mock_val = "host_project_registration_id_value" - assert arg == mock_val -def test_create_host_project_registration_flattened_error(): +def test_create_host_project_registration_rest_flattened_error(transport: str = "rest"): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1599,67 +1469,11 @@ def test_create_host_project_registration_flattened_error(): ) -@pytest.mark.asyncio -async def 
test_create_host_project_registration_flattened_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = host_project_registration_service.HostProjectRegistration() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_host_project_registration( - parent="parent_value", - host_project_registration=host_project_registration_service.HostProjectRegistration( - name="name_value" - ), - host_project_registration_id="host_project_registration_id_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].host_project_registration - mock_val = host_project_registration_service.HostProjectRegistration( - name="name_value" - ) - assert arg == mock_val - arg = args[0].host_project_registration_id - mock_val = "host_project_registration_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_host_project_registration_flattened_error_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_host_project_registration_rest_error(): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_host_project_registration( - host_project_registration_service.CreateHostProjectRegistrationRequest(), - parent="parent_value", - host_project_registration=host_project_registration_service.HostProjectRegistration( - name="name_value" - ), - host_project_registration_id="host_project_registration_id_value", - ) - @pytest.mark.parametrize( "request_type", @@ -1668,32 +1482,38 @@ async def test_create_host_project_registration_flattened_error_async(): dict, ], ) -def test_get_host_project_registration(request_type, transport: str = "grpc"): +def test_get_host_project_registration_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = host_project_registration_service.HostProjectRegistration( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration( name="name_value", gcp_project="gcp_project_value", ) - response = client.get_host_project_registration(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = host_project_registration_service.GetHostProjectRegistrationRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_host_project_registration(request) # Establish that the response is the type that we expect. assert isinstance( @@ -1703,69 +1523,13 @@ def test_get_host_project_registration(request_type, transport: str = "grpc"): assert response.gcp_project == "gcp_project_value" -def test_get_host_project_registration_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_host_project_registration() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == host_project_registration_service.GetHostProjectRegistrationRequest() - ) - - -def test_get_host_project_registration_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = host_project_registration_service.GetHostProjectRegistrationRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_host_project_registration(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == host_project_registration_service.GetHostProjectRegistrationRequest( - name="name_value", - ) - - -def test_get_host_project_registration_use_cached_wrapped_rpc(): +def test_get_host_project_registration_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1786,6 +1550,7 @@ def test_get_host_project_registration_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_host_project_registration ] = mock_rpc + request = {} client.get_host_project_registration(request) @@ -1799,274 +1564,250 @@ def test_get_host_project_registration_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_host_project_registration_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration( - name="name_value", - gcp_project="gcp_project_value", - ) - ) - response = await client.get_host_project_registration() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == host_project_registration_service.GetHostProjectRegistrationRequest() - ) - - -@pytest.mark.asyncio -async def test_get_host_project_registration_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_host_project_registration_rest_required_fields( + request_type=host_project_registration_service.GetHostProjectRegistrationRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.HostProjectRegistrationServiceRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_host_project_registration - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, 
use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_host_project_registration - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_host_project_registration(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_host_project_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_host_project_registration(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_host_project_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_host_project_registration_async( - transport: str = "grpc_asyncio", - request_type=host_project_registration_service.GetHostProjectRegistrationRequest, -): - client = HostProjectRegistrationServiceAsyncClient( + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration( - name="name_value", - gcp_project="gcp_project_value", - ) - ) - response = await client.get_host_project_registration(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = host_project_registration_service.GetHostProjectRegistrationRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance( - response, host_project_registration_service.HostProjectRegistration - ) - assert response.name == "name_value" - assert response.gcp_project == "gcp_project_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_host_project_registration(request) -@pytest.mark.asyncio -async def test_get_host_project_registration_async_from_dict(): - await test_get_host_project_registration_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_host_project_registration_field_headers(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_host_project_registration_rest_unset_required_fields(): + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = host_project_registration_service.GetHostProjectRegistrationRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - call.return_value = host_project_registration_service.HostProjectRegistration() - client.get_host_project_registration(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_host_project_registration._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_host_project_registration_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_host_project_registration_rest_interceptors(null_interceptor): + transport = transports.HostProjectRegistrationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.HostProjectRegistrationServiceRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = host_project_registration_service.GetHostProjectRegistrationRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
+ client = HostProjectRegistrationServiceClient(transport=transport) with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "post_get_host_project_registration", + ) as post, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "pre_get_host_project_registration", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + host_project_registration_service.GetHostProjectRegistrationRequest.pb( + host_project_registration_service.GetHostProjectRegistrationRequest() + ) ) - await client.get_host_project_registration(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_host_project_registration_flattened(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = host_project_registration_service.HostProjectRegistration() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_host_project_registration( - name="name_value", + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + host_project_registration_service.HostProjectRegistration.to_json( + host_project_registration_service.HostProjectRegistration() + ) ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + request = host_project_registration_service.GetHostProjectRegistrationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = host_project_registration_service.HostProjectRegistration() + + client.get_host_project_registration( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() -def test_get_host_project_registration_flattened_error(): +def test_get_host_project_registration_rest_bad_request( + transport: str = "rest", + request_type=host_project_registration_service.GetHostProjectRegistrationRequest, +): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_host_project_registration( - host_project_registration_service.GetHostProjectRegistrationRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_host_project_registration(request) -@pytest.mark.asyncio -async def test_get_host_project_registration_flattened_async(): - client = HostProjectRegistrationServiceAsyncClient( +def test_get_host_project_registration_rest_flattened(): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = host_project_registration_service.HostProjectRegistration() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_host_project_registration( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_host_project_registration(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/hostProjectRegistrations/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_host_project_registration_flattened_error_async(): - client = HostProjectRegistrationServiceAsyncClient( +def test_get_host_project_registration_rest_flattened_error(transport: str = "rest"): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_host_project_registration( + client.get_host_project_registration( host_project_registration_service.GetHostProjectRegistrationRequest(), name="name_value", ) +def test_get_host_project_registration_rest_error(): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -2074,110 +1815,52 @@ async def test_get_host_project_registration_flattened_error_async(): dict, ], ) -def test_list_host_project_registrations(request_type, transport: str = "grpc"): +def test_list_host_project_registrations_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( host_project_registration_service.ListHostProjectRegistrationsResponse( next_page_token="next_page_token_value", ) ) - response = client.list_host_project_registrations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - host_project_registration_service.ListHostProjectRegistrationsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + host_project_registration_service.ListHostProjectRegistrationsResponse.pb( + return_value + ) ) - assert args[0] == request + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_host_project_registrations(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListHostProjectRegistrationsPager) assert response.next_page_token == "next_page_token_value" -def test_list_host_project_registrations_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_host_project_registrations() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == host_project_registration_service.ListHostProjectRegistrationsRequest() - ) - - -def test_list_host_project_registrations_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = host_project_registration_service.ListHostProjectRegistrationsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_host_project_registrations(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == host_project_registration_service.ListHostProjectRegistrationsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", - ) - - -def test_list_host_project_registrations_use_cached_wrapped_rpc(): +def test_list_host_project_registrations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2198,6 +1881,7 @@ def test_list_host_project_registrations_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.list_host_project_registrations ] = mock_rpc + request = {} client.list_host_project_registrations(request) @@ -2211,289 +1895,279 @@ def test_list_host_project_registrations_use_cached_wrapped_rpc(): assert 
mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_host_project_registrations_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_list_host_project_registrations_rest_required_fields( + request_type=host_project_registration_service.ListHostProjectRegistrationsRequest, +): + transport_class = transports.HostProjectRegistrationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.ListHostProjectRegistrationsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_host_project_registrations() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == host_project_registration_service.ListHostProjectRegistrationsRequest() - ) + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_host_project_registrations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_list_host_project_registrations_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" - # Ensure method has been cached - assert ( - client._client._transport.list_host_project_registrations - in client._client._transport._wrapped_methods + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_host_project_registrations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", ) + ) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_host_project_registrations - ] = mock_rpc + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" - request = {} - await client.list_host_project_registrations(request) + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # Designate an appropriate value for the returned response. + return_value = ( + host_project_registration_service.ListHostProjectRegistrationsResponse() + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - await client.list_host_project_registrations(request) + response_value = Response() + response_value.status_code = 200 - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Convert return value to protobuf type + return_value = host_project_registration_service.ListHostProjectRegistrationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_list_host_project_registrations_async( - transport: str = "grpc_asyncio", - request_type=host_project_registration_service.ListHostProjectRegistrationsRequest, -): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + response = client.list_host_project_registrations(request) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.ListHostProjectRegistrationsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_host_project_registrations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = ( - host_project_registration_service.ListHostProjectRegistrationsRequest() - ) - assert args[0] == request - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListHostProjectRegistrationsAsyncPager) - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.asyncio -async def test_list_host_project_registrations_async_from_dict(): - await test_list_host_project_registrations_async(request_type=dict) - - -def test_list_host_project_registrations_field_headers(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_host_project_registrations_rest_unset_required_fields(): + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = host_project_registration_service.ListHostProjectRegistrationsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - call.return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse() + unset_fields = transport.list_host_project_registrations._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) ) - client.list_host_project_registrations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_host_project_registrations_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_host_project_registrations_rest_interceptors(null_interceptor): + transport = transports.HostProjectRegistrationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.HostProjectRegistrationServiceRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = host_project_registration_service.ListHostProjectRegistrationsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
+ client = HostProjectRegistrationServiceClient(transport=transport) with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.ListHostProjectRegistrationsResponse() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "post_list_host_project_registrations", + ) as post, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "pre_list_host_project_registrations", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + host_project_registration_service.ListHostProjectRegistrationsRequest.pb( + host_project_registration_service.ListHostProjectRegistrationsRequest() + ) ) - await client.list_host_project_registrations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } -def test_list_host_project_registrations_flattened(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = host_project_registration_service.ListHostProjectRegistrationsResponse.to_json( + host_project_registration_service.ListHostProjectRegistrationsResponse() + ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + request = ( + host_project_registration_service.ListHostProjectRegistrationsRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( host_project_registration_service.ListHostProjectRegistrationsResponse() ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. + client.list_host_project_registrations( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_host_project_registrations_flattened_error(): +def test_list_host_project_registrations_rest_bad_request( + transport: str = "rest", + request_type=host_project_registration_service.ListHostProjectRegistrationsRequest, +): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_host_project_registrations( - host_project_registration_service.ListHostProjectRegistrationsRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_host_project_registrations(request) -@pytest.mark.asyncio -async def test_list_host_project_registrations_flattened_async(): - client = HostProjectRegistrationServiceAsyncClient( +def test_list_host_project_registrations_rest_flattened(): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( host_project_registration_service.ListHostProjectRegistrationsResponse() ) - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.ListHostProjectRegistrationsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.list_host_project_registrations( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + host_project_registration_service.ListHostProjectRegistrationsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_host_project_registrations(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/hostProjectRegistrations" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_host_project_registrations_flattened_error_async(): - client = HostProjectRegistrationServiceAsyncClient( +def test_list_host_project_registrations_rest_flattened_error(transport: str = "rest"): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_host_project_registrations( + client.list_host_project_registrations( host_project_registration_service.ListHostProjectRegistrationsRequest(), parent="parent_value", ) -def test_list_host_project_registrations_pager(transport_name: str = "grpc"): +def test_list_host_project_registrations_rest_pager(transport: str = "rest"): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( host_project_registration_service.ListHostProjectRegistrationsResponse( host_project_registrations=[ host_project_registration_service.HostProjectRegistration(), @@ -2518,22 +2192,26 @@ def test_list_host_project_registrations_pager(transport_name: str = "grpc"): host_project_registration_service.HostProjectRegistration(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_host_project_registrations( - request={}, retry=retry, timeout=timeout + # Wrap the values into proper Response objs + response = tuple( + host_project_registration_service.ListHostProjectRegistrationsResponse.to_json( + x + ) + for x in response ) + return_values = tuple(Response() 
for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.list_host_project_registrations(request=sample_request) results = list(pager) assert len(results) == 6 @@ -2542,3300 +2220,791 @@ def test_list_host_project_registrations_pager(transport_name: str = "grpc"): for i in results ) - -def test_list_host_project_registrations_pages(transport_name: str = "grpc"): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="abc", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[], - next_page_token="def", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="ghi", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - ), - RuntimeError, + pages = list( + client.list_host_project_registrations(request=sample_request).pages ) - pages = list(client.list_host_project_registrations(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.asyncio -async def test_list_host_project_registrations_async_pager(): - client = HostProjectRegistrationServiceAsyncClient( +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.HostProjectRegistrationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_host_project_registrations), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="abc", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[], - next_page_token="def", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="ghi", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_host_project_registrations( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all( - isinstance(i, host_project_registration_service.HostProjectRegistration) - for i in responses - ) - - -@pytest.mark.asyncio -async def test_list_host_project_registrations_async_pages(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="abc", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[], - next_page_token="def", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="ghi", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_host_project_registrations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - host_project_registration_service.CreateHostProjectRegistrationRequest, - dict, - ], -) -def test_create_host_project_registration_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["host_project_registration"] = { - "name": "name_value", - "gcp_project": "gcp_project_value", - "create_time": {"seconds": 751, "nanos": 
543}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = host_project_registration_service.CreateHostProjectRegistrationRequest.meta.fields[ - "host_project_registration" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "host_project_registration" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - 
subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["host_project_registration"][field]) - ): - del request_init["host_project_registration"][field][i][subfield] - else: - del request_init["host_project_registration"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = host_project_registration_service.HostProjectRegistration( - name="name_value", - gcp_project="gcp_project_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = host_project_registration_service.HostProjectRegistration.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_host_project_registration(request) - - # Establish that the response is the type that we expect. 
- assert isinstance( - response, host_project_registration_service.HostProjectRegistration - ) - assert response.name == "name_value" - assert response.gcp_project == "gcp_project_value" - - -def test_create_host_project_registration_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.create_host_project_registration - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_host_project_registration - ] = mock_rpc - - request = {} - client.create_host_project_registration(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_host_project_registration(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_host_project_registration_rest_required_fields( - request_type=host_project_registration_service.CreateHostProjectRegistrationRequest, -): - transport_class = transports.HostProjectRegistrationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["host_project_registration_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - assert "hostProjectRegistrationId" not in jsonified_request - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_host_project_registration._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "hostProjectRegistrationId" in jsonified_request - assert ( - jsonified_request["hostProjectRegistrationId"] - == request_init["host_project_registration_id"] - ) - - jsonified_request["parent"] = "parent_value" - jsonified_request[ - "hostProjectRegistrationId" - ] = "host_project_registration_id_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_host_project_registration._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("host_project_registration_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "hostProjectRegistrationId" in jsonified_request - assert ( - jsonified_request["hostProjectRegistrationId"] - == "host_project_registration_id_value" - ) - - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = host_project_registration_service.HostProjectRegistration() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = host_project_registration_service.HostProjectRegistration.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_host_project_registration(request) - - expected_params = [ - ( - "hostProjectRegistrationId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_host_project_registration_rest_unset_required_fields(): - transport = transports.HostProjectRegistrationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = ( - transport.create_host_project_registration._get_unset_required_fields({}) - ) - assert set(unset_fields) == ( - set(("hostProjectRegistrationId",)) - & set( - ( - "parent", - "hostProjectRegistrationId", - "hostProjectRegistration", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_host_project_registration_rest_interceptors(null_interceptor): - transport = transports.HostProjectRegistrationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.HostProjectRegistrationServiceRestInterceptor(), - ) - client = HostProjectRegistrationServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - 
transports.HostProjectRegistrationServiceRestInterceptor, - "post_create_host_project_registration", - ) as post, mock.patch.object( - transports.HostProjectRegistrationServiceRestInterceptor, - "pre_create_host_project_registration", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = ( - host_project_registration_service.CreateHostProjectRegistrationRequest.pb( - host_project_registration_service.CreateHostProjectRegistrationRequest() - ) - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - host_project_registration_service.HostProjectRegistration.to_json( - host_project_registration_service.HostProjectRegistration() - ) - ) - - request = ( - host_project_registration_service.CreateHostProjectRegistrationRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = host_project_registration_service.HostProjectRegistration() - - client.create_host_project_registration( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_host_project_registration_rest_bad_request( - transport: str = "rest", - request_type=host_project_registration_service.CreateHostProjectRegistrationRequest, -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_host_project_registration(request) - - -def test_create_host_project_registration_rest_flattened(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = host_project_registration_service.HostProjectRegistration() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - host_project_registration=host_project_registration_service.HostProjectRegistration( - name="name_value" - ), - host_project_registration_id="host_project_registration_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = host_project_registration_service.HostProjectRegistration.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_host_project_registration(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/hostProjectRegistrations" - % client.transport._host, - args[1], - ) - - -def test_create_host_project_registration_rest_flattened_error(transport: str = "rest"): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_host_project_registration( - host_project_registration_service.CreateHostProjectRegistrationRequest(), - parent="parent_value", - host_project_registration=host_project_registration_service.HostProjectRegistration( - name="name_value" - ), - host_project_registration_id="host_project_registration_id_value", - ) - - -def test_create_host_project_registration_rest_error(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - host_project_registration_service.GetHostProjectRegistrationRequest, - dict, - ], -) -def test_get_host_project_registration_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = host_project_registration_service.HostProjectRegistration( - name="name_value", - gcp_project="gcp_project_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = host_project_registration_service.HostProjectRegistration.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_host_project_registration(request) - - # Establish that the response is the type that we expect. - assert isinstance( - response, host_project_registration_service.HostProjectRegistration - ) - assert response.name == "name_value" - assert response.gcp_project == "gcp_project_value" - - -def test_get_host_project_registration_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.get_host_project_registration - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_host_project_registration - ] = mock_rpc - - request = {} - client.get_host_project_registration(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_host_project_registration(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_host_project_registration_rest_required_fields( - request_type=host_project_registration_service.GetHostProjectRegistrationRequest, -): - transport_class = transports.HostProjectRegistrationServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_host_project_registration._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_host_project_registration._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = host_project_registration_service.HostProjectRegistration() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = host_project_registration_service.HostProjectRegistration.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_host_project_registration(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_host_project_registration_rest_unset_required_fields(): - transport = transports.HostProjectRegistrationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_host_project_registration._get_unset_required_fields( - {} - ) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_host_project_registration_rest_interceptors(null_interceptor): - transport = transports.HostProjectRegistrationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.HostProjectRegistrationServiceRestInterceptor(), - ) - client = HostProjectRegistrationServiceClient(transport=transport) - with 
mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HostProjectRegistrationServiceRestInterceptor, - "post_get_host_project_registration", - ) as post, mock.patch.object( - transports.HostProjectRegistrationServiceRestInterceptor, - "pre_get_host_project_registration", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = ( - host_project_registration_service.GetHostProjectRegistrationRequest.pb( - host_project_registration_service.GetHostProjectRegistrationRequest() - ) - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - host_project_registration_service.HostProjectRegistration.to_json( - host_project_registration_service.HostProjectRegistration() - ) - ) - - request = host_project_registration_service.GetHostProjectRegistrationRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = host_project_registration_service.HostProjectRegistration() - - client.get_host_project_registration( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_host_project_registration_rest_bad_request( - transport: str = "rest", - request_type=host_project_registration_service.GetHostProjectRegistrationRequest, -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" - } - request = request_type(**request_init) - - # Mock 
the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_host_project_registration(request) - - -def test_get_host_project_registration_rest_flattened(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = host_project_registration_service.HostProjectRegistration() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = host_project_registration_service.HostProjectRegistration.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_host_project_registration(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/hostProjectRegistrations/*}" - % client.transport._host, - args[1], - ) - - -def test_get_host_project_registration_rest_flattened_error(transport: str = "rest"): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_host_project_registration( - host_project_registration_service.GetHostProjectRegistrationRequest(), - name="name_value", - ) - - -def test_get_host_project_registration_rest_error(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - host_project_registration_service.ListHostProjectRegistrationsRequest, - dict, - ], -) -def test_list_host_project_registrations_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse( - next_page_token="next_page_token_value", - ) - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_host_project_registrations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListHostProjectRegistrationsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_host_project_registrations_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.list_host_project_registrations - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_host_project_registrations - ] = mock_rpc - - request = {} - client.list_host_project_registrations(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_host_project_registrations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_host_project_registrations_rest_required_fields( - request_type=host_project_registration_service.ListHostProjectRegistrationsRequest, -): - transport_class = transports.HostProjectRegistrationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_host_project_registrations._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_host_project_registrations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse() - ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = host_project_registration_service.ListHostProjectRegistrationsResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_host_project_registrations(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_host_project_registrations_rest_unset_required_fields(): - transport = transports.HostProjectRegistrationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_host_project_registrations._get_unset_required_fields( - {} - ) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_host_project_registrations_rest_interceptors(null_interceptor): - transport = 
transports.HostProjectRegistrationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.HostProjectRegistrationServiceRestInterceptor(), - ) - client = HostProjectRegistrationServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HostProjectRegistrationServiceRestInterceptor, - "post_list_host_project_registrations", - ) as post, mock.patch.object( - transports.HostProjectRegistrationServiceRestInterceptor, - "pre_list_host_project_registrations", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = ( - host_project_registration_service.ListHostProjectRegistrationsRequest.pb( - host_project_registration_service.ListHostProjectRegistrationsRequest() - ) - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = host_project_registration_service.ListHostProjectRegistrationsResponse.to_json( - host_project_registration_service.ListHostProjectRegistrationsResponse() - ) - - request = ( - host_project_registration_service.ListHostProjectRegistrationsRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse() - ) - - client.list_host_project_registrations( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_host_project_registrations_rest_bad_request( - transport: str = "rest", - 
request_type=host_project_registration_service.ListHostProjectRegistrationsRequest, -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_host_project_registrations(request) - - -def test_list_host_project_registrations_rest_flattened(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse() - ) - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_host_project_registrations(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/hostProjectRegistrations" - % client.transport._host, - args[1], - ) - - -def test_list_host_project_registrations_rest_flattened_error(transport: str = "rest"): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_host_project_registrations( - host_project_registration_service.ListHostProjectRegistrationsRequest(), - parent="parent_value", - ) - - -def test_list_host_project_registrations_rest_pager(transport: str = "rest"): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="abc", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[], - next_page_token="def", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="ghi", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - host_project_registration_service.ListHostProjectRegistrationsResponse.to_json( - x - ) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_host_project_registrations(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, host_project_registration_service.HostProjectRegistration) - for i in results - ) - - pages = list( - 
client.list_host_project_registrations(request=sample_request).pages - ) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.HostProjectRegistrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.HostProjectRegistrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = HostProjectRegistrationServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.HostProjectRegistrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = HostProjectRegistrationServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = HostProjectRegistrationServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. 
- transport = transports.HostProjectRegistrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = HostProjectRegistrationServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.HostProjectRegistrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = HostProjectRegistrationServiceClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.HostProjectRegistrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.HostProjectRegistrationServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.HostProjectRegistrationServiceGrpcTransport, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - transports.HostProjectRegistrationServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = HostProjectRegistrationServiceClient.get_transport_class( - transport_name - )( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.HostProjectRegistrationServiceGrpcTransport, - ) - - -def test_host_project_registration_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.HostProjectRegistrationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_host_project_registration_service_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.HostProjectRegistrationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "create_host_project_registration", - "get_host_project_registration", - "list_host_project_registrations", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_host_project_registration_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.HostProjectRegistrationServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_host_project_registration_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.HostProjectRegistrationServiceTransport() - adc.assert_called_once() - - -def test_host_project_registration_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - HostProjectRegistrationServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.HostProjectRegistrationServiceGrpcTransport, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - ], -) -def test_host_project_registration_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.HostProjectRegistrationServiceGrpcTransport, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - transports.HostProjectRegistrationServiceRestTransport, - ], -) -def test_host_project_registration_service_transport_auth_gdch_credentials( - transport_class, -): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.HostProjectRegistrationServiceGrpcTransport, grpc_helpers), - ( - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - grpc_helpers_async, - ), - ], -) -def test_host_project_registration_service_transport_create_channel( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.HostProjectRegistrationServiceGrpcTransport, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - ], -) -def test_host_project_registration_service_grpc_transport_client_cert_source_for_mtls( - transport_class, -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_host_project_registration_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.HostProjectRegistrationServiceRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_host_project_registration_service_host_no_port(transport_name): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_host_project_registration_service_host_with_port(transport_name): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - 
else "https://apihub.googleapis.com:8000" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_host_project_registration_service_client_transport_session_collision( - transport_name, -): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = HostProjectRegistrationServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = HostProjectRegistrationServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_host_project_registration._session - session2 = client2.transport.create_host_project_registration._session - assert session1 != session2 - session1 = client1.transport.get_host_project_registration._session - session2 = client2.transport.get_host_project_registration._session - assert session1 != session2 - session1 = client1.transport.list_host_project_registrations._session - session2 = client2.transport.list_host_project_registrations._session - assert session1 != session2 - - -def test_host_project_registration_service_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.HostProjectRegistrationServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_host_project_registration_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.HostProjectRegistrationServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.HostProjectRegistrationServiceGrpcTransport, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - ], -) -def test_host_project_registration_service_transport_channel_mtls_with_client_cert_source( - transport_class, -): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this 
test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.HostProjectRegistrationServiceGrpcTransport, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - ], -) -def test_host_project_registration_service_transport_channel_mtls_with_adc( - transport_class, -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_host_project_registration_path(): - project = "squid" - location = "clam" - host_project_registration = "whelk" - expected = "projects/{project}/locations/{location}/hostProjectRegistrations/{host_project_registration}".format( - project=project, - location=location, - host_project_registration=host_project_registration, - ) - actual = HostProjectRegistrationServiceClient.host_project_registration_path( - project, location, host_project_registration - ) - assert expected == actual - - -def test_parse_host_project_registration_path(): - expected = { - "project": "octopus", - 
"location": "oyster", - "host_project_registration": "nudibranch", - } - path = HostProjectRegistrationServiceClient.host_project_registration_path( - **expected - ) - - # Check that the path construction is reversible. - actual = HostProjectRegistrationServiceClient.parse_host_project_registration_path( - path - ) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = HostProjectRegistrationServiceClient.common_billing_account_path( - billing_account - ) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = HostProjectRegistrationServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = HostProjectRegistrationServiceClient.parse_common_billing_account_path( - path - ) - assert expected == actual - - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = HostProjectRegistrationServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = HostProjectRegistrationServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = HostProjectRegistrationServiceClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = HostProjectRegistrationServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = HostProjectRegistrationServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = HostProjectRegistrationServiceClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format( - project=project, - ) - actual = HostProjectRegistrationServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = HostProjectRegistrationServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = HostProjectRegistrationServiceClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = HostProjectRegistrationServiceClient.common_location_path( - project, location - ) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = HostProjectRegistrationServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = HostProjectRegistrationServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.HostProjectRegistrationServiceTransport, "_prep_wrapped_messages" - ) as prep: - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.HostProjectRegistrationServiceTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = HostProjectRegistrationServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = HostProjectRegistrationServiceClient( + # It is an error to provide a credentials file and a transport instance. + transport = transports.HostProjectRegistrationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + client_options=options, + transport=transport, + ) - response = client.list_operations(request) + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # It is an error to provide scopes and a transport instance. + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) -def test_delete_operation(transport: str = "grpc"): - client = HostProjectRegistrationServiceClient( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.HostProjectRegistrationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + client = HostProjectRegistrationServiceClient(transport=transport) + assert client.transport is transport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. - assert response is None +@pytest.mark.parametrize( + "transport_class", + [ + transports.HostProjectRegistrationServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = HostProjectRegistrationServiceAsyncClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = HostProjectRegistrationServiceClient.get_transport_class( + transport_name + )( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + assert transport.kind == transport_name - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_host_project_registration_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.HostProjectRegistrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) - # Establish that the response is the type that we expect. - assert response is None +def test_host_project_registration_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.HostProjectRegistrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_delete_operation_field_headers(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_host_project_registration", + "get_host_project_registration", + "list_host_project_registrations", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + with pytest.raises(NotImplementedError): + transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.DeleteOperationRequest() - request.name = "locations" +def test_host_project_registration_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.HostProjectRegistrationServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_host_project_registration_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.HostProjectRegistrationServiceTransport() + adc.assert_called_once() -def test_delete_operation_from_dict(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } +def test_host_project_registration_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + HostProjectRegistrationServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } +def test_host_project_registration_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.HostProjectRegistrationServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) - call.assert_called() + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -def test_cancel_operation(transport: str = "grpc"): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_host_project_registration_service_host_no_port(transport_name): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_host_project_registration_service_host_with_port(transport_name): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_host_project_registration_service_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = HostProjectRegistrationServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = HostProjectRegistrationServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_host_project_registration._session + session2 = client2.transport.create_host_project_registration._session + assert session1 != session2 + session1 = client1.transport.get_host_project_registration._session + session2 = client2.transport.get_host_project_registration._session + assert session1 != session2 + session1 = client1.transport.list_host_project_registrations._session + session2 = client2.transport.list_host_project_registrations._session + assert session1 != session2 -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_host_project_registration_path(): + project = "squid" + location = "clam" + host_project_registration = "whelk" + expected = 
"projects/{project}/locations/{location}/hostProjectRegistrations/{host_project_registration}".format( + project=project, + location=location, + host_project_registration=host_project_registration, + ) + actual = HostProjectRegistrationServiceClient.host_project_registration_path( + project, location, host_project_registration ) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_host_project_registration_path(): + expected = { + "project": "octopus", + "location": "oyster", + "host_project_registration": "nudibranch", + } + path = HostProjectRegistrationServiceClient.host_project_registration_path( + **expected + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_host_project_registration_path( + path + ) + assert expected == actual -def test_cancel_operation_from_dict(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + actual = HostProjectRegistrationServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = HostProjectRegistrationServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_common_billing_account_path( + path ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, ) + actual = HostProjectRegistrationServiceClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = HostProjectRegistrationServiceClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = HostProjectRegistrationServiceClient.common_organization_path(organization) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = HostProjectRegistrationServiceClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, ) + actual = HostProjectRegistrationServiceClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = HostProjectRegistrationServiceClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. 
+ actual = HostProjectRegistrationServiceClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = HostProjectRegistrationServiceClient.common_location_path( + project, location + ) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = HostProjectRegistrationServiceClient.common_location_path(**expected) + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() + with mock.patch.object( + transports.HostProjectRegistrationServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - response = await client.get_operation( - request={ - "name": "locations", - } + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.HostProjectRegistrationServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = HostProjectRegistrationServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are 
mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = HostProjectRegistrationServiceAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. 
+ assert response is None -def test_list_locations_from_dict(): +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_get_location(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + assert response is None -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = HostProjectRegistrationServiceAsyncClient( +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -def test_get_location_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials() + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.get_location(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials() +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -5853,7 +3022,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = HostProjectRegistrationServiceClient( @@ -5872,11 +3040,7 @@ def test_client_ctx(): [ ( HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, + transports.HostProjectRegistrationServiceRestTransport, ), ], ) diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py index 947a54eb86f7..db139191d3f8 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py @@ -47,7 +47,6 @@ from requests.sessions import Session from google.cloud.apihub_v1.services.linting_service import ( - LintingServiceAsyncClient, LintingServiceClient, transports, ) @@ -211,11 +210,6 @@ def test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LintingServiceClient), ) -@mock.patch.object( - LintingServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - 
modify_default_endpoint_template(LintingServiceAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -300,7 +294,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (LintingServiceClient, transports.LintingServiceGrpcTransport, "grpc"), (LintingServiceClient, transports.LintingServiceRestTransport, "rest"), ], ) @@ -380,8 +373,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (LintingServiceClient, "grpc"), - (LintingServiceAsyncClient, "grpc_asyncio"), (LintingServiceClient, "rest"), ], ) @@ -406,8 +397,6 @@ def test_linting_service_client_from_service_account_info(client_class, transpor @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.LintingServiceGrpcTransport, "grpc"), - (transports.LintingServiceGrpcAsyncIOTransport, "grpc_asyncio"), (transports.LintingServiceRestTransport, "rest"), ], ) @@ -432,8 +421,6 @@ def test_linting_service_client_service_account_always_use_jwt( @pytest.mark.parametrize( "client_class,transport_name", [ - (LintingServiceClient, "grpc"), - (LintingServiceAsyncClient, "grpc_asyncio"), (LintingServiceClient, "rest"), ], ) @@ -465,24 +452,17 @@ def test_linting_service_client_from_service_account_file(client_class, transpor def test_linting_service_client_get_transport_class(): transport = LintingServiceClient.get_transport_class() available_transports = [ - transports.LintingServiceGrpcTransport, transports.LintingServiceRestTransport, ] assert transport in available_transports - transport = LintingServiceClient.get_transport_class("grpc") - assert transport == transports.LintingServiceGrpcTransport + transport = LintingServiceClient.get_transport_class("rest") + assert transport == transports.LintingServiceRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - 
(LintingServiceClient, transports.LintingServiceGrpcTransport, "grpc"), - ( - LintingServiceAsyncClient, - transports.LintingServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), (LintingServiceClient, transports.LintingServiceRestTransport, "rest"), ], ) @@ -491,11 +471,6 @@ def test_linting_service_client_get_transport_class(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LintingServiceClient), ) -@mock.patch.object( - LintingServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(LintingServiceAsyncClient), -) def test_linting_service_client_client_options( client_class, transport_class, transport_name ): @@ -629,20 +604,6 @@ def test_linting_service_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - (LintingServiceClient, transports.LintingServiceGrpcTransport, "grpc", "true"), - ( - LintingServiceAsyncClient, - transports.LintingServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - (LintingServiceClient, transports.LintingServiceGrpcTransport, "grpc", "false"), - ( - LintingServiceAsyncClient, - transports.LintingServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), (LintingServiceClient, transports.LintingServiceRestTransport, "rest", "true"), (LintingServiceClient, transports.LintingServiceRestTransport, "rest", "false"), ], @@ -652,11 +613,6 @@ def test_linting_service_client_client_options( "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LintingServiceClient), ) -@mock.patch.object( - LintingServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(LintingServiceAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_linting_service_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ -760,19 +716,12 @@ def test_linting_service_client_mtls_env_auto( ) -@pytest.mark.parametrize( - "client_class", 
[LintingServiceClient, LintingServiceAsyncClient] -) +@pytest.mark.parametrize("client_class", [LintingServiceClient]) @mock.patch.object( LintingServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LintingServiceClient), ) -@mock.patch.object( - LintingServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(LintingServiceAsyncClient), -) def test_linting_service_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() @@ -864,19 +813,12 @@ def test_linting_service_client_get_mtls_endpoint_and_cert_source(client_class): ) -@pytest.mark.parametrize( - "client_class", [LintingServiceClient, LintingServiceAsyncClient] -) +@pytest.mark.parametrize("client_class", [LintingServiceClient]) @mock.patch.object( LintingServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LintingServiceClient), ) -@mock.patch.object( - LintingServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(LintingServiceAsyncClient), -) def test_linting_service_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" @@ -953,12 +895,6 @@ def test_linting_service_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (LintingServiceClient, transports.LintingServiceGrpcTransport, "grpc"), - ( - LintingServiceAsyncClient, - transports.LintingServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), (LintingServiceClient, transports.LintingServiceRestTransport, "rest"), ], ) @@ -990,18 +926,6 @@ def test_linting_service_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - ( - LintingServiceClient, - transports.LintingServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - LintingServiceAsyncClient, - transports.LintingServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), 
(LintingServiceClient, transports.LintingServiceRestTransport, "rest", None), ], ) @@ -1029,96 +953,6 @@ def test_linting_service_client_client_options_credentials_file( ) -def test_linting_service_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = LintingServiceClient( - client_options={"api_endpoint": "squid.clam.whelk"} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - LintingServiceClient, - transports.LintingServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - LintingServiceAsyncClient, - transports.LintingServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_linting_service_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1126,30 +960,36 @@ def test_linting_service_client_create_channel_credentials_file( dict, ], ) -def test_get_style_guide(request_type, transport: str = "grpc"): +def test_get_style_guide_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # 
Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuide( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide( name="name_value", linter=common_fields.Linter.SPECTRAL, ) - response = client.get_style_guide(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = linting_service.GetStyleGuideRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_style_guide(request) # Establish that the response is the type that we expect. assert isinstance(response, linting_service.StyleGuide) @@ -1157,60 +997,13 @@ def test_get_style_guide(request_type, transport: str = "grpc"): assert response.linter == common_fields.Linter.SPECTRAL -def test_get_style_guide_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_style_guide() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.GetStyleGuideRequest() - - -def test_get_style_guide_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = linting_service.GetStyleGuideRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_style_guide(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.GetStyleGuideRequest( - name="name_value", - ) - - -def test_get_style_guide_use_cached_wrapped_rpc(): +def test_get_style_guide_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1226,6 +1019,7 @@ def test_get_style_guide_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_style_guide] = mock_rpc + request = {} client.get_style_guide(request) @@ -1239,256 +1033,237 @@ def test_get_style_guide_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_style_guide_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) - ) - response = await client.get_style_guide() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.GetStyleGuideRequest() - - -@pytest.mark.asyncio -async def test_get_style_guide_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_style_guide_rest_required_fields( + request_type=linting_service.GetStyleGuideRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_style_guide - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_style_guide - ] = mock_rpc - - request = {} - await client.get_style_guide(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.get_style_guide(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + transport_class = transports.LintingServiceRestTransport -@pytest.mark.asyncio -async def test_get_style_guide_async( - transport: str = "grpc_asyncio", request_type=linting_service.GetStyleGuideRequest -): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) - ) - response = await client.get_style_guide(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = linting_service.GetStyleGuideRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_style_guide._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert isinstance(response, linting_service.StyleGuide) - assert response.name == "name_value" - assert response.linter == common_fields.Linter.SPECTRAL + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_get_style_guide_async_from_dict(): - await test_get_style_guide_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_style_guide._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_get_style_guide_field_headers(): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.GetStyleGuideRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - call.return_value = linting_service.StyleGuide() - client.get_style_guide(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) -@pytest.mark.asyncio -async def test_get_style_guide_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.GetStyleGuideRequest() + response = client.get_style_guide(request) - request.name = "name_value" + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide() - ) - await client.get_style_guide(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_get_style_guide_rest_unset_required_fields(): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_style_guide._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_style_guide_flattened(): - client = LintingServiceClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_style_guide_rest_interceptors(null_interceptor): + transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LintingServiceRestInterceptor(), ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuide() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_style_guide( - name="name_value", + client = LintingServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LintingServiceRestInterceptor, "post_get_style_guide" + ) as post, mock.patch.object( + transports.LintingServiceRestInterceptor, "pre_get_style_guide" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = linting_service.GetStyleGuideRequest.pb( + linting_service.GetStyleGuideRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = linting_service.StyleGuide.to_json( + linting_service.StyleGuide() ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + request = linting_service.GetStyleGuideRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = linting_service.StyleGuide() + + client.get_style_guide( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() -def test_get_style_guide_flattened_error(): +def test_get_style_guide_rest_bad_request( + transport: str = "rest", request_type=linting_service.GetStyleGuideRequest +): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_style_guide( - linting_service.GetStyleGuideRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_style_guide(request) -@pytest.mark.asyncio -async def test_get_style_guide_flattened_async(): - client = LintingServiceAsyncClient( + +def test_get_style_guide_rest_flattened(): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuide() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_style_guide( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_style_guide(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/plugins/*/styleGuide}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_style_guide_flattened_error_async(): - client = LintingServiceAsyncClient( +def test_get_style_guide_rest_flattened_error(transport: str = "rest"): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_style_guide( + client.get_style_guide( linting_service.GetStyleGuideRequest(), name="name_value", ) +def test_get_style_guide_rest_error(): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1496,93 +1271,124 @@ async def test_get_style_guide_flattened_error_async(): dict, ], ) -def test_update_style_guide(request_type, transport: str = "grpc"): +def test_update_style_guide_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "style_guide": { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + } + request_init["style_guide"] = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide", + "linter": 1, + "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) - response = client.update_style_guide(request) + # Determine if the message type is proto-plus or protobuf + test_field = linting_service.UpdateStyleGuideRequest.meta.fields["style_guide"] - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = linting_service.UpdateStyleGuideRequest() - assert args[0] == request + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - # Establish that the response is the type that we expect. - assert isinstance(response, linting_service.StyleGuide) - assert response.name == "name_value" - assert response.linter == common_fields.Linter.SPECTRAL + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields -def test_update_style_guide_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_style_guide() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.UpdateStyleGuideRequest() + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["style_guide"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -def test_update_style_guide_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = linting_service.UpdateStyleGuideRequest() + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["style_guide"][field])): + del request_init["style_guide"][field][i][subfield] + else: + del request_init["style_guide"][field][subfield] + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide( + name="name_value", + linter=common_fields.Linter.SPECTRAL, ) - client.update_style_guide(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.UpdateStyleGuideRequest() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_style_guide(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, linting_service.StyleGuide) + assert response.name == "name_value" + assert response.linter == common_fields.Linter.SPECTRAL -def test_update_style_guide_use_cached_wrapped_rpc(): +def test_update_style_guide_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1602,6 +1408,7 @@ def test_update_style_guide_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.update_style_guide ] = mock_rpc + request = {} client.update_style_guide(request) @@ -1615,216 +1422,223 @@ def test_update_style_guide_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_style_guide_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_update_style_guide_rest_required_fields( + request_type=linting_service.UpdateStyleGuideRequest, +): + transport_class = transports.LintingServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) - ) - response = await client.update_style_guide() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.UpdateStyleGuideRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_style_guide._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_update_style_guide_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_style_guide._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) - # Ensure method has been cached - assert ( - client._client._transport.update_style_guide - in client._client._transport._wrapped_methods - ) + # verify required fields with non-default values are left alone - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_style_guide - ] = mock_rpc + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - request = {} - await client.update_style_guide(request) + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + response_value = Response() + response_value.status_code = 200 - await client.update_style_guide(request) + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_style_guide(request) -@pytest.mark.asyncio -async def test_update_style_guide_async( - transport: str = "grpc_asyncio", - request_type=linting_service.UpdateStyleGuideRequest, -): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_style_guide_rest_unset_required_fields(): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + unset_fields = transport.update_style_guide._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("styleGuide",))) - # Mock the actual call within the gRPC stub, and fake the request. 
+ +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_style_guide_rest_interceptors(null_interceptor): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LintingServiceRestInterceptor(), + ) + client = LintingServiceClient(transport=transport) with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LintingServiceRestInterceptor, "post_update_style_guide" + ) as post, mock.patch.object( + transports.LintingServiceRestInterceptor, "pre_update_style_guide" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = linting_service.UpdateStyleGuideRequest.pb( + linting_service.UpdateStyleGuideRequest() ) - response = await client.update_style_guide(request) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = linting_service.UpdateStyleGuideRequest() - assert args[0] == request + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = linting_service.StyleGuide.to_json( + linting_service.StyleGuide() + ) - # Establish that the response is the type that we expect. 
- assert isinstance(response, linting_service.StyleGuide) - assert response.name == "name_value" - assert response.linter == common_fields.Linter.SPECTRAL + request = linting_service.UpdateStyleGuideRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = linting_service.StyleGuide() + client.update_style_guide( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) -@pytest.mark.asyncio -async def test_update_style_guide_async_from_dict(): - await test_update_style_guide_async(request_type=dict) + pre.assert_called_once() + post.assert_called_once() -def test_update_style_guide_field_headers(): +def test_update_style_guide_rest_bad_request( + transport: str = "rest", request_type=linting_service.UpdateStyleGuideRequest +): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.UpdateStyleGuideRequest() - - request.style_guide.name = "name_value" + # send a request that will satisfy transcoding + request_init = { + "style_guide": { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - call.return_value = linting_service.StyleGuide() + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value client.update_style_guide(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "style_guide.name=name_value", - ) in kw["metadata"] - -@pytest.mark.asyncio -async def test_update_style_guide_field_headers_async(): - client = LintingServiceAsyncClient( +def test_update_style_guide_rest_flattened(): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.UpdateStyleGuideRequest() - - request.style_guide.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide() - ) - await client.update_style_guide(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "style_guide.name=name_value", - ) in kw["metadata"] - + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = linting_service.StyleGuide() -def test_update_style_guide_flattened(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # get arguments that satisfy an http rule for this method + sample_request = { + "style_guide": { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + } - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuide() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_style_guide( + # get truthy value for each flattened field + mock_args = dict( style_guide=linting_service.StyleGuide(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_style_guide(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].style_guide - mock_val = linting_service.StyleGuide(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{style_guide.name=projects/*/locations/*/plugins/*/styleGuide}" + % client.transport._host, + args[1], + ) -def test_update_style_guide_flattened_error(): +def test_update_style_guide_rest_flattened_error(transport: str = "rest"): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1837,56 +1651,11 @@ def test_update_style_guide_flattened_error(): ) -@pytest.mark.asyncio -async def test_update_style_guide_flattened_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuide() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_style_guide( - style_guide=linting_service.StyleGuide(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].style_guide - mock_val = linting_service.StyleGuide(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_style_guide_flattened_error_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_style_guide_rest_error(): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_style_guide( - linting_service.UpdateStyleGuideRequest(), - style_guide=linting_service.StyleGuide(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - @pytest.mark.parametrize( "request_type", @@ -1895,97 +1664,50 @@ async def test_update_style_guide_flattened_error_async(): dict, ], ) -def test_get_style_guide_contents(request_type, transport: str = "grpc"): +def test_get_style_guide_contents_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = linting_service.StyleGuideContents( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuideContents( contents=b"contents_blob", mime_type="mime_type_value", ) - response = client.get_style_guide_contents(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = linting_service.GetStyleGuideContentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuideContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_style_guide_contents(request) + + # Establish that the response is the type that we expect. assert isinstance(response, linting_service.StyleGuideContents) assert response.contents == b"contents_blob" assert response.mime_type == "mime_type_value" -def test_get_style_guide_contents_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_style_guide_contents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.GetStyleGuideContentsRequest() - - -def test_get_style_guide_contents_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = linting_service.GetStyleGuideContentsRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_style_guide_contents(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.GetStyleGuideContentsRequest( - name="name_value", - ) - - -def test_get_style_guide_contents_use_cached_wrapped_rpc(): +def test_get_style_guide_contents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2006,6 +1728,7 @@ def test_get_style_guide_contents_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_style_guide_contents ] = mock_rpc + request = {} client.get_style_guide_contents(request) @@ -2019,544 +1742,245 @@ def test_get_style_guide_contents_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_style_guide_contents_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuideContents( - contents=b"contents_blob", - mime_type="mime_type_value", - ) - ) - response = await client.get_style_guide_contents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.GetStyleGuideContentsRequest() - - -@pytest.mark.asyncio -async def test_get_style_guide_contents_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_style_guide_contents_rest_required_fields( + request_type=linting_service.GetStyleGuideContentsRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.LintingServiceRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_style_guide_contents - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_style_guide_contents - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_style_guide_contents(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_style_guide_contents._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_style_guide_contents(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_style_guide_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_style_guide_contents_async( - transport: str = "grpc_asyncio", - request_type=linting_service.GetStyleGuideContentsRequest, -): - client = LintingServiceAsyncClient( + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuideContents() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuideContents( - contents=b"contents_blob", - mime_type="mime_type_value", - ) - ) - response = await client.get_style_guide_contents(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = linting_service.GetStyleGuideContentsRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = linting_service.StyleGuideContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, linting_service.StyleGuideContents) - assert response.contents == b"contents_blob" - assert response.mime_type == "mime_type_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_style_guide_contents(request) -@pytest.mark.asyncio -async def test_get_style_guide_contents_async_from_dict(): - await test_get_style_guide_contents_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_style_guide_contents_field_headers(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_style_guide_contents_rest_unset_required_fields(): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.GetStyleGuideContentsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - call.return_value = linting_service.StyleGuideContents() - client.get_style_guide_contents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_style_guide_contents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_style_guide_contents_field_headers_async(): - client = LintingServiceAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_style_guide_contents_rest_interceptors(null_interceptor): + transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LintingServiceRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.GetStyleGuideContentsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = LintingServiceClient(transport=transport) with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuideContents() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LintingServiceRestInterceptor, "post_get_style_guide_contents" + ) as post, mock.patch.object( + transports.LintingServiceRestInterceptor, "pre_get_style_guide_contents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = linting_service.GetStyleGuideContentsRequest.pb( + linting_service.GetStyleGuideContentsRequest() ) - await client.get_style_guide_contents(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = linting_service.StyleGuideContents.to_json( + linting_service.StyleGuideContents() + ) -def test_get_style_guide_contents_flattened(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = linting_service.GetStyleGuideContentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = linting_service.StyleGuideContents() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuideContents() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_style_guide_contents( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_style_guide_contents_flattened_error(): +def test_get_style_guide_contents_rest_bad_request( + transport: str = "rest", request_type=linting_service.GetStyleGuideContentsRequest +): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_style_guide_contents( - linting_service.GetStyleGuideContentsRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_style_guide_contents(request) -@pytest.mark.asyncio -async def test_get_style_guide_contents_flattened_async(): - client = LintingServiceAsyncClient( + +def test_get_style_guide_contents_rest_flattened(): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuideContents() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuideContents() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuideContents() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_style_guide_contents( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuideContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_get_style_guide_contents_flattened_error_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client.get_style_guide_contents(**mock_args) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_style_guide_contents( - linting_service.GetStyleGuideContentsRequest(), - name="name_value", + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/plugins/*/styleGuide}:contents" + % client.transport._host, + args[1], ) -@pytest.mark.parametrize( - "request_type", - [ - linting_service.LintSpecRequest, - dict, - ], -) -def test_lint_spec(request_type, transport: str = "grpc"): +def test_get_style_guide_contents_rest_flattened_error(transport: str = "rest"): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = linting_service.LintSpecRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_lint_spec_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.lint_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.LintSpecRequest() - - -def test_lint_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = linting_service.LintSpecRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.lint_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.LintSpecRequest( + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_style_guide_contents( + linting_service.GetStyleGuideContentsRequest(), name="name_value", ) -def test_lint_spec_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.lint_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.lint_spec] = mock_rpc - request = {} - client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.lint_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_lint_spec_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.lint_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.LintSpecRequest() - - -@pytest.mark.asyncio -async def test_lint_spec_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.lint_spec - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.lint_spec - ] = mock_rpc - - request = {} - await client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.lint_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_lint_spec_async( - transport: str = "grpc_asyncio", request_type=linting_service.LintSpecRequest -): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = linting_service.LintSpecRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_lint_spec_async_from_dict(): - await test_lint_spec_async(request_type=dict) - - -def test_lint_spec_field_headers(): +def test_get_style_guide_contents_rest_error(): client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.LintSpecRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - call.return_value = None - client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_lint_spec_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = linting_service.LintSpecRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - @pytest.mark.parametrize( "request_type", [ - linting_service.GetStyleGuideRequest, + linting_service.LintSpecRequest, dict, ], ) -def test_get_style_guide_rest(request_type): +def test_lint_spec_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2564,36 +1988,29 @@ def test_get_style_guide_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = linting_service.StyleGuide.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_style_guide(request) + response = client.lint_spec(request) # Establish that the response is the type that we expect. - assert isinstance(response, linting_service.StyleGuide) - assert response.name == "name_value" - assert response.linter == common_fields.Linter.SPECTRAL + assert response is None -def test_get_style_guide_rest_use_cached_wrapped_rpc(): +def test_lint_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2607,31 +2024,29 @@ def test_get_style_guide_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_style_guide in client._transport._wrapped_methods + assert client._transport.lint_spec in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_style_guide] = mock_rpc + client._transport._wrapped_methods[client._transport.lint_spec] = mock_rpc request = {} - client.get_style_guide(request) + client.lint_spec(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_style_guide(request) + client.lint_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_style_guide_rest_required_fields( - request_type=linting_service.GetStyleGuideRequest, -): +def test_lint_spec_rest_required_fields(request_type=linting_service.LintSpecRequest): transport_class = transports.LintingServiceRestTransport request_init = {} @@ -2646,7 +2061,7 @@ def test_get_style_guide_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_style_guide._get_unset_required_fields(jsonified_request) + ).lint_spec._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -2655,7 +2070,7 @@ def test_get_style_guide_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_style_guide._get_unset_required_fields(jsonified_request) + ).lint_spec._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2669,7 +2084,7 @@ def test_get_style_guide_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = linting_service.StyleGuide() + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -2681,39 +2096,37 @@ def test_get_style_guide_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = linting_service.StyleGuide.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_style_guide(request) + response = client.lint_spec(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_style_guide_rest_unset_required_fields(): +def test_lint_spec_rest_unset_required_fields(): transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_style_guide._get_unset_required_fields({}) + unset_fields = transport.lint_spec._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_style_guide_rest_interceptors(null_interceptor): +def test_lint_spec_rest_interceptors(null_interceptor): transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2726,14 +2139,11 @@ def test_get_style_guide_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.LintingServiceRestInterceptor, "post_get_style_guide" - ) as post, mock.patch.object( - 
transports.LintingServiceRestInterceptor, "pre_get_style_guide" + transports.LintingServiceRestInterceptor, "pre_lint_spec" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = linting_service.GetStyleGuideRequest.pb( - linting_service.GetStyleGuideRequest() + pb_message = linting_service.LintSpecRequest.pb( + linting_service.LintSpecRequest() ) transcode.return_value = { "method": "post", @@ -2745,19 +2155,15 @@ def test_get_style_guide_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = linting_service.StyleGuide.to_json( - linting_service.StyleGuide() - ) - request = linting_service.GetStyleGuideRequest() + request = linting_service.LintSpecRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = linting_service.StyleGuide() - client.get_style_guide( + client.lint_spec( request, metadata=[ ("key", "val"), @@ -2766,11 +2172,10 @@ def test_get_style_guide_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_get_style_guide_rest_bad_request( - transport: str = "rest", request_type=linting_service.GetStyleGuideRequest +def test_lint_spec_rest_bad_request( + transport: str = "rest", request_type=linting_service.LintSpecRequest ): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2779,7 +2184,7 @@ def test_get_style_guide_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" } request = request_type(**request_init) @@ -2792,2969 +2197,815 @@ def test_get_style_guide_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value 
- client.get_style_guide(request) + client.lint_spec(request) -def test_get_style_guide_rest_flattened(): +def test_lint_spec_rest_error(): client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = linting_service.StyleGuide() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = linting_service.StyleGuide.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_style_guide(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/plugins/*/styleGuide}" - % client.transport._host, - args[1], - ) - -def test_get_style_guide_rest_flattened_error(transport: str = "rest"): - client = LintingServiceClient( +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. with pytest.raises(ValueError): - client.get_style_guide( - linting_service.GetStyleGuideRequest(), - name="name_value", + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - -def test_get_style_guide_rest_error(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - linting_service.UpdateStyleGuideRequest, - dict, - ], -) -def test_update_style_guide_rest(request_type): - client = LintingServiceClient( + # It is an error to provide a credentials file and a transport instance. + transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - - # send a request that will satisfy transcoding - request_init = { - "style_guide": { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - } - request_init["style_guide"] = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide", - "linter": 1, - "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = linting_service.UpdateStyleGuideRequest.meta.fields["style_guide"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["style_guide"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["style_guide"][field])): - del request_init["style_guide"][field][i][subfield] - else: - del 
request_init["style_guide"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = linting_service.StyleGuide.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_style_guide(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, linting_service.StyleGuide) - assert response.name == "name_value" - assert response.linter == common_fields.Linter.SPECTRAL - - -def test_update_style_guide_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + with pytest.raises(ValueError): client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.update_style_guide in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
+ client_options={"credentials_file": "credentials.json"}, + transport=transport, ) - client._transport._wrapped_methods[ - client._transport.update_style_guide - ] = mock_rpc - - request = {} - client.update_style_guide(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_style_guide(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_style_guide_rest_required_fields( - request_type=linting_service.UpdateStyleGuideRequest, -): - transport_class = transports.LintingServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_style_guide._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_style_guide._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - client = LintingServiceClient( + # It is an error to provide an api_key and a transport instance. + transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = linting_service.StyleGuide() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = linting_service.StyleGuide.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_style_guide(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_style_guide_rest_unset_required_fields(): - transport = transports.LintingServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_style_guide._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("styleGuide",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_style_guide_rest_interceptors(null_interceptor): - transport = transports.LintingServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.LintingServiceRestInterceptor(), - ) - client = 
LintingServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.LintingServiceRestInterceptor, "post_update_style_guide" - ) as post, mock.patch.object( - transports.LintingServiceRestInterceptor, "pre_update_style_guide" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = linting_service.UpdateStyleGuideRequest.pb( - linting_service.UpdateStyleGuideRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = linting_service.StyleGuide.to_json( - linting_service.StyleGuide() - ) - - request = linting_service.UpdateStyleGuideRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = linting_service.StyleGuide() - - client.update_style_guide( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_style_guide_rest_bad_request( - transport: str = "rest", request_type=linting_service.UpdateStyleGuideRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "style_guide": { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_style_guide(request) - - -def test_update_style_guide_rest_flattened(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = linting_service.StyleGuide() - - # get arguments that satisfy an http rule for this method - sample_request = { - "style_guide": { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - } - - # get truthy value for each flattened field - mock_args = dict( - style_guide=linting_service.StyleGuide(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = linting_service.StyleGuide.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_style_guide(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{style_guide.name=projects/*/locations/*/plugins/*/styleGuide}" - % client.transport._host, - args[1], - ) - - -def test_update_style_guide_rest_flattened_error(transport: str = "rest"): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. + options = client_options.ClientOptions() + options.api_key = "api_key" with pytest.raises(ValueError): - client.update_style_guide( - linting_service.UpdateStyleGuideRequest(), - style_guide=linting_service.StyleGuide(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_style_guide_rest_error(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - linting_service.GetStyleGuideContentsRequest, - dict, - ], -) -def test_get_style_guide_contents_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = linting_service.StyleGuideContents( - contents=b"contents_blob", - mime_type="mime_type_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = linting_service.StyleGuideContents.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_style_guide_contents(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, linting_service.StyleGuideContents) - assert response.contents == b"contents_blob" - assert response.mime_type == "mime_type_value" - - -def test_get_style_guide_contents_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.get_style_guide_contents - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_style_guide_contents - ] = mock_rpc - - request = {} - client.get_style_guide_contents(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_style_guide_contents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_style_guide_contents_rest_required_fields( - request_type=linting_service.GetStyleGuideContentsRequest, -): - transport_class = transports.LintingServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_style_guide_contents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_style_guide_contents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = linting_service.StyleGuideContents() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = linting_service.StyleGuideContents.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_style_guide_contents(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_style_guide_contents_rest_unset_required_fields(): - transport = transports.LintingServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_style_guide_contents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_style_guide_contents_rest_interceptors(null_interceptor): - transport = transports.LintingServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.LintingServiceRestInterceptor(), - ) - client = LintingServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.LintingServiceRestInterceptor, "post_get_style_guide_contents" - ) as post, mock.patch.object( - transports.LintingServiceRestInterceptor, "pre_get_style_guide_contents" - ) as pre: - 
pre.assert_not_called() - post.assert_not_called() - pb_message = linting_service.GetStyleGuideContentsRequest.pb( - linting_service.GetStyleGuideContentsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = linting_service.StyleGuideContents.to_json( - linting_service.StyleGuideContents() - ) - - request = linting_service.GetStyleGuideContentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = linting_service.StyleGuideContents() - - client.get_style_guide_contents( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_style_guide_contents_rest_bad_request( - transport: str = "rest", request_type=linting_service.GetStyleGuideContentsRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_style_guide_contents(request) - - -def test_get_style_guide_contents_rest_flattened(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = linting_service.StyleGuideContents() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = linting_service.StyleGuideContents.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_style_guide_contents(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/plugins/*/styleGuide}:contents" - % client.transport._host, - args[1], - ) - - -def test_get_style_guide_contents_rest_flattened_error(transport: str = "rest"): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_style_guide_contents( - linting_service.GetStyleGuideContentsRequest(), - name="name_value", - ) - - -def test_get_style_guide_contents_rest_error(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - linting_service.LintSpecRequest, - dict, - ], -) -def test_lint_spec_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.lint_spec(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_lint_spec_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.lint_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.lint_spec] = mock_rpc - - request = {} - client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.lint_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_lint_spec_rest_required_fields(request_type=linting_service.LintSpecRequest): - transport_class = transports.LintingServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).lint_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - 
credentials=ga_credentials.AnonymousCredentials() - ).lint_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.lint_spec(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_lint_spec_rest_unset_required_fields(): - transport = transports.LintingServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.lint_spec._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_lint_spec_rest_interceptors(null_interceptor): - transport = transports.LintingServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.LintingServiceRestInterceptor(), - ) - client = LintingServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.LintingServiceRestInterceptor, "pre_lint_spec" - ) as pre: - pre.assert_not_called() - pb_message = linting_service.LintSpecRequest.pb( - linting_service.LintSpecRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = linting_service.LintSpecRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - 
pre.return_value = request, metadata - - client.lint_spec( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_lint_spec_rest_bad_request( - transport: str = "rest", request_type=linting_service.LintSpecRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.lint_spec(request) - - -def test_lint_spec_rest_error(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.LintingServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.LintingServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LintingServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. 
- transport = transports.LintingServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LintingServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LintingServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.LintingServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LintingServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.LintingServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = LintingServiceClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.LintingServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.LintingServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LintingServiceGrpcTransport, - transports.LintingServiceGrpcAsyncIOTransport, - transports.LintingServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = LintingServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.LintingServiceGrpcTransport, - ) - - -def test_linting_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.LintingServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_linting_service_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.LintingServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "get_style_guide", - "update_style_guide", - "get_style_guide_contents", - "lint_spec", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_linting_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LintingServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_linting_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LintingServiceTransport() - adc.assert_called_once() - - -def test_linting_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - LintingServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LintingServiceGrpcTransport, - transports.LintingServiceGrpcAsyncIOTransport, - ], -) -def test_linting_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LintingServiceGrpcTransport, - transports.LintingServiceGrpcAsyncIOTransport, - transports.LintingServiceRestTransport, - ], -) -def test_linting_service_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.LintingServiceGrpcTransport, grpc_helpers), - (transports.LintingServiceGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_linting_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LintingServiceGrpcTransport, - transports.LintingServiceGrpcAsyncIOTransport, - ], -) -def test_linting_service_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_linting_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.LintingServiceRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_linting_service_host_no_port(transport_name): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_linting_service_host_with_port(transport_name): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com:8000" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ 
- "rest", - ], -) -def test_linting_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = LintingServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = LintingServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.get_style_guide._session - session2 = client2.transport.get_style_guide._session - assert session1 != session2 - session1 = client1.transport.update_style_guide._session - session2 = client2.transport.update_style_guide._session - assert session1 != session2 - session1 = client1.transport.get_style_guide_contents._session - session2 = client2.transport.get_style_guide_contents._session - assert session1 != session2 - session1 = client1.transport.lint_spec._session - session2 = client2.transport.lint_spec._session - assert session1 != session2 - - -def test_linting_service_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.LintingServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_linting_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.LintingServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [ - transports.LintingServiceGrpcTransport, - transports.LintingServiceGrpcAsyncIOTransport, - ], -) -def test_linting_service_transport_channel_mtls_with_client_cert_source( - transport_class, -): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [ - transports.LintingServiceGrpcTransport, - transports.LintingServiceGrpcAsyncIOTransport, - ], -) -def test_linting_service_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_spec_path(): - project = "squid" - location = "clam" - api = "whelk" - version = "octopus" - spec = "oyster" - expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}".format( - project=project, - location=location, - api=api, - version=version, - spec=spec, - ) - actual = LintingServiceClient.spec_path(project, location, api, version, spec) - assert expected == actual - - -def test_parse_spec_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "api": "mussel", - "version": "winkle", - "spec": "nautilus", - } - path = LintingServiceClient.spec_path(**expected) - - # Check that the path construction is reversible. 
- actual = LintingServiceClient.parse_spec_path(path) - assert expected == actual - - -def test_style_guide_path(): - project = "scallop" - location = "abalone" - plugin = "squid" - expected = ( - "projects/{project}/locations/{location}/plugins/{plugin}/styleGuide".format( - project=project, - location=location, - plugin=plugin, - ) - ) - actual = LintingServiceClient.style_guide_path(project, location, plugin) - assert expected == actual - - -def test_parse_style_guide_path(): - expected = { - "project": "clam", - "location": "whelk", - "plugin": "octopus", - } - path = LintingServiceClient.style_guide_path(**expected) - - # Check that the path construction is reversible. - actual = LintingServiceClient.parse_style_guide_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = LintingServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = LintingServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = LintingServiceClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = LintingServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = LintingServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = LintingServiceClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = LintingServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = LintingServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = LintingServiceClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format( - project=project, - ) - actual = LintingServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = LintingServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = LintingServiceClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = LintingServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = LintingServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = LintingServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.LintingServiceTransport, "_prep_wrapped_messages" - ) as prep: - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.LintingServiceTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = LintingServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + client_options=options, + transport=transport, + ) - response = client.list_operations(request) + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LintingServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # It is an error to provide scopes and a transport instance. 
+ transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LintingServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) -def test_delete_operation(transport: str = "grpc"): - client = LintingServiceClient( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + client = LintingServiceClient(transport=transport) + assert client.transport is transport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. - assert response is None +@pytest.mark.parametrize( + "transport_class", + [ + transports.LintingServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = LintingServiceAsyncClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = LintingServiceClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + assert transport.kind == transport_name - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_linting_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LintingServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) - # Establish that the response is the type that we expect. - assert response is None +def test_linting_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.LintingServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_delete_operation_field_headers(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_style_guide", + "update_style_guide", + "get_style_guide_contents", + "lint_spec", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + with pytest.raises(NotImplementedError): + transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" +def test_linting_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LintingServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_linting_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LintingServiceTransport() + adc.assert_called_once() -def test_delete_operation_from_dict(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } +def test_linting_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + LintingServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } +def test_linting_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.LintingServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) - call.assert_called() + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -def test_cancel_operation(transport: str = "grpc"): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_linting_service_host_no_port(transport_name): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = LintingServiceAsyncClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_linting_service_host_with_port(transport_name): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. 
- assert response is None +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_linting_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = LintingServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = LintingServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_style_guide._session + session2 = client2.transport.get_style_guide._session + assert session1 != session2 + session1 = client1.transport.update_style_guide._session + session2 = client2.transport.update_style_guide._session + assert session1 != session2 + session1 = client1.transport.get_style_guide_contents._session + session2 = client2.transport.get_style_guide_contents._session + assert session1 != session2 + session1 = client1.transport.lint_spec._session + session2 = client2.transport.lint_spec._session + assert session1 != session2 -def test_cancel_operation_field_headers(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_spec_path(): + project = "squid" + location = "clam" + api = "whelk" + version = "octopus" + spec = "oyster" + expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}".format( + project=project, + location=location, + api=api, + version=version, + spec=spec, ) + actual = LintingServiceClient.spec_path(project, location, api, version, spec) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_spec_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "api": "mussel", + "version": "winkle", + "spec": "nautilus", + } + path = LintingServiceClient.spec_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_spec_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_style_guide_path(): + project = "scallop" + location = "abalone" + plugin = "squid" + expected = ( + "projects/{project}/locations/{location}/plugins/{plugin}/styleGuide".format( + project=project, + location=location, + plugin=plugin, + ) ) + actual = LintingServiceClient.style_guide_path(project, location, plugin) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_style_guide_path(): + expected = { + "project": "clam", + "location": "whelk", + "plugin": "octopus", + } + path = LintingServiceClient.style_guide_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_style_guide_path(path) + assert expected == actual -def test_cancel_operation_from_dict(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + actual = LintingServiceClient.common_billing_account_path(billing_account) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = LintingServiceClient.common_billing_account_path(**expected) + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_common_billing_account_path(path) + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, ) + actual = LintingServiceClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = LintingServiceClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = LintingServiceClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = LintingServiceClient.common_organization_path(organization) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = LintingServiceClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = LintingServiceClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, ) + actual = LintingServiceClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = LintingServiceClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = LintingServiceClient.common_location_path(project, location) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = LintingServiceClient.common_location_path(**expected) + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() + with mock.patch.object( + transports.LintingServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - response = await client.get_operation( - request={ - "name": "locations", - } + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.LintingServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = LintingServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = LintingServiceAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. 
+ assert response is None -def test_list_locations_from_dict(): +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_get_location(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_operation(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, locations_pb2.Location) + assert response is None -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = LintingServiceAsyncClient( +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -def test_get_location_field_headers(): - client = LintingServiceClient(credentials=ga_credentials.AnonymousCredentials()) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.get_location(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials() +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -5772,7 +3023,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = LintingServiceClient( @@ -5789,8 +3039,7 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (LintingServiceClient, transports.LintingServiceGrpcTransport), - (LintingServiceAsyncClient, transports.LintingServiceGrpcAsyncIOTransport), + (LintingServiceClient, transports.LintingServiceRestTransport), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py index f3cf6227c307..06f68007eef1 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py @@ -55,11 +55,7 @@ from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.cloud.apihub_v1.services.provisioning import ( - ProvisioningAsyncClient, - ProvisioningClient, - transports, -) +from google.cloud.apihub_v1.services.provisioning import ProvisioningClient, transports from google.cloud.apihub_v1.types import common_fields, provisioning_service @@ -205,11 +201,6 @@ def 
test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ProvisioningClient), ) -@mock.patch.object( - ProvisioningAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ProvisioningAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -294,7 +285,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ProvisioningClient, transports.ProvisioningGrpcTransport, "grpc"), (ProvisioningClient, transports.ProvisioningRestTransport, "rest"), ], ) @@ -374,8 +364,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (ProvisioningClient, "grpc"), - (ProvisioningAsyncClient, "grpc_asyncio"), (ProvisioningClient, "rest"), ], ) @@ -400,8 +388,6 @@ def test_provisioning_client_from_service_account_info(client_class, transport_n @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.ProvisioningGrpcTransport, "grpc"), - (transports.ProvisioningGrpcAsyncIOTransport, "grpc_asyncio"), (transports.ProvisioningRestTransport, "rest"), ], ) @@ -426,8 +412,6 @@ def test_provisioning_client_service_account_always_use_jwt( @pytest.mark.parametrize( "client_class,transport_name", [ - (ProvisioningClient, "grpc"), - (ProvisioningAsyncClient, "grpc_asyncio"), (ProvisioningClient, "rest"), ], ) @@ -459,24 +443,17 @@ def test_provisioning_client_from_service_account_file(client_class, transport_n def test_provisioning_client_get_transport_class(): transport = ProvisioningClient.get_transport_class() available_transports = [ - transports.ProvisioningGrpcTransport, transports.ProvisioningRestTransport, ] assert transport in available_transports - transport = ProvisioningClient.get_transport_class("grpc") - assert transport == transports.ProvisioningGrpcTransport + transport = 
ProvisioningClient.get_transport_class("rest") + assert transport == transports.ProvisioningRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ProvisioningClient, transports.ProvisioningGrpcTransport, "grpc"), - ( - ProvisioningAsyncClient, - transports.ProvisioningGrpcAsyncIOTransport, - "grpc_asyncio", - ), (ProvisioningClient, transports.ProvisioningRestTransport, "rest"), ], ) @@ -485,11 +462,6 @@ def test_provisioning_client_get_transport_class(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ProvisioningClient), ) -@mock.patch.object( - ProvisioningAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ProvisioningAsyncClient), -) def test_provisioning_client_client_options( client_class, transport_class, transport_name ): @@ -623,20 +595,6 @@ def test_provisioning_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - (ProvisioningClient, transports.ProvisioningGrpcTransport, "grpc", "true"), - ( - ProvisioningAsyncClient, - transports.ProvisioningGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - (ProvisioningClient, transports.ProvisioningGrpcTransport, "grpc", "false"), - ( - ProvisioningAsyncClient, - transports.ProvisioningGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), (ProvisioningClient, transports.ProvisioningRestTransport, "rest", "true"), (ProvisioningClient, transports.ProvisioningRestTransport, "rest", "false"), ], @@ -646,11 +604,6 @@ def test_provisioning_client_client_options( "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ProvisioningClient), ) -@mock.patch.object( - ProvisioningAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ProvisioningAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_provisioning_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ 
-754,15 +707,10 @@ def test_provisioning_client_mtls_env_auto( ) -@pytest.mark.parametrize("client_class", [ProvisioningClient, ProvisioningAsyncClient]) +@pytest.mark.parametrize("client_class", [ProvisioningClient]) @mock.patch.object( ProvisioningClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ProvisioningClient) ) -@mock.patch.object( - ProvisioningAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ProvisioningAsyncClient), -) def test_provisioning_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() @@ -854,17 +802,12 @@ def test_provisioning_client_get_mtls_endpoint_and_cert_source(client_class): ) -@pytest.mark.parametrize("client_class", [ProvisioningClient, ProvisioningAsyncClient]) +@pytest.mark.parametrize("client_class", [ProvisioningClient]) @mock.patch.object( ProvisioningClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ProvisioningClient), ) -@mock.patch.object( - ProvisioningAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ProvisioningAsyncClient), -) def test_provisioning_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" @@ -941,12 +884,6 @@ def test_provisioning_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ProvisioningClient, transports.ProvisioningGrpcTransport, "grpc"), - ( - ProvisioningAsyncClient, - transports.ProvisioningGrpcAsyncIOTransport, - "grpc_asyncio", - ), (ProvisioningClient, transports.ProvisioningRestTransport, "rest"), ], ) @@ -978,18 +915,6 @@ def test_provisioning_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - ( - ProvisioningClient, - transports.ProvisioningGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - ProvisioningAsyncClient, - transports.ProvisioningGrpcAsyncIOTransport, - "grpc_asyncio", - 
grpc_helpers_async, - ), (ProvisioningClient, transports.ProvisioningRestTransport, "rest", None), ], ) @@ -1017,94 +942,6 @@ def test_provisioning_client_client_options_credentials_file( ) -def test_provisioning_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = ProvisioningClient(client_options={"api_endpoint": "squid.clam.whelk"}) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - ProvisioningClient, - transports.ProvisioningGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - ProvisioningAsyncClient, - transports.ProvisioningGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_provisioning_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1112,94 +949,120 @@ def test_provisioning_client_create_channel_credentials_file( dict, ], ) -def test_create_api_hub_instance(request_type, transport: str = "grpc"): +def test_create_api_hub_instance_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + 
transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["api_hub_instance"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "state_message": "state_message_value", + "config": {"cmek_key_name": "cmek_key_name_value"}, + "labels": {}, + "description": "description_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_api_hub_instance(request) + # Determine if the message type is proto-plus or protobuf + test_field = provisioning_service.CreateApiHubInstanceRequest.meta.fields[ + "api_hub_instance" + ] - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = provisioning_service.CreateApiHubInstanceRequest() - assert args[0] == request + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields -def test_create_api_hub_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_api_hub_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.CreateApiHubInstanceRequest() + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["api_hub_instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -def test_create_api_hub_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = provisioning_service.CreateApiHubInstanceRequest( - parent="parent_value", - api_hub_instance_id="api_hub_instance_id_value", - ) + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["api_hub_instance"][field])): + del request_init["api_hub_instance"][field][i][subfield] + else: + del request_init["api_hub_instance"][field][subfield] + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_api_hub_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.CreateApiHubInstanceRequest( - parent="parent_value", - api_hub_instance_id="api_hub_instance_id_value", - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_api_hub_instance(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" -def test_create_api_hub_instance_use_cached_wrapped_rpc(): +def test_create_api_hub_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1220,15 +1083,15 @@ def test_create_api_hub_instance_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_api_hub_instance ] = mock_rpc + request = {} client.create_api_hub_instance(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_api_hub_instance(request) @@ -1238,279 +1101,233 @@ def test_create_api_hub_instance_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_api_hub_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.create_api_hub_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.CreateApiHubInstanceRequest() - - -@pytest.mark.asyncio -async def test_create_api_hub_instance_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_create_api_hub_instance_rest_required_fields( + request_type=provisioning_service.CreateApiHubInstanceRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.create_api_hub_instance - in client._client._transport._wrapped_methods - ) + transport_class = transports.ProvisioningRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_api_hub_instance - ] = mock_rpc + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + 
json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.create_api_hub_instance(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() + # verify required fields with default values are now present - await client.create_api_hub_instance(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_api_hub_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("api_hub_instance_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_create_api_hub_instance_async( - transport: str = "grpc_asyncio", - request_type=provisioning_service.CreateApiHubInstanceRequest, -): - client = ProvisioningAsyncClient( + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.create_api_hub_instance(request) + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = provisioning_service.CreateApiHubInstanceRequest() - assert args[0] == request + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_api_hub_instance(request) -@pytest.mark.asyncio -async def test_create_api_hub_instance_async_from_dict(): - await test_create_api_hub_instance_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_api_hub_instance_field_headers(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_api_hub_instance_rest_unset_required_fields(): + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning_service.CreateApiHubInstanceRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_api_hub_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("apiHubInstanceId",)) + & set( + ( + "parent", + "apiHubInstance", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_api_hub_instance_field_headers_async(): - client = ProvisioningAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_api_hub_instance_rest_interceptors(null_interceptor): + transport = transports.ProvisioningRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProvisioningRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning_service.CreateApiHubInstanceRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
+ client = ProvisioningClient(transport=transport) with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ProvisioningRestInterceptor, "post_create_api_hub_instance" + ) as post, mock.patch.object( + transports.ProvisioningRestInterceptor, "pre_create_api_hub_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = provisioning_service.CreateApiHubInstanceRequest.pb( + provisioning_service.CreateApiHubInstanceRequest() ) - await client.create_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) -def test_create_api_hub_instance_flattened(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = provisioning_service.CreateApiHubInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.create_api_hub_instance( - parent="parent_value", - api_hub_instance=common_fields.ApiHubInstance(name="name_value"), - api_hub_instance_id="api_hub_instance_id_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].api_hub_instance - mock_val = common_fields.ApiHubInstance(name="name_value") - assert arg == mock_val - arg = args[0].api_hub_instance_id - mock_val = "api_hub_instance_id_value" - assert arg == mock_val - - -def test_create_api_hub_instance_flattened_error(): + pre.assert_called_once() + post.assert_called_once() + + +def test_create_api_hub_instance_rest_bad_request( + transport: str = "rest", + request_type=provisioning_service.CreateApiHubInstanceRequest, +): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_api_hub_instance( - provisioning_service.CreateApiHubInstanceRequest(), - parent="parent_value", - api_hub_instance=common_fields.ApiHubInstance(name="name_value"), - api_hub_instance_id="api_hub_instance_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_api_hub_instance(request) -@pytest.mark.asyncio -async def test_create_api_hub_instance_flattened_async(): - client = ProvisioningAsyncClient( +def test_create_api_hub_instance_rest_flattened(): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_api_hub_instance( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", api_hub_instance=common_fields.ApiHubInstance(name="name_value"), api_hub_instance_id="api_hub_instance_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_api_hub_instance(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].api_hub_instance - mock_val = common_fields.ApiHubInstance(name="name_value") - assert arg == mock_val - arg = args[0].api_hub_instance_id - mock_val = "api_hub_instance_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_api_hub_instance_flattened_error_async(): - client = ProvisioningAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/apiHubInstances" + % client.transport._host, + args[1], + ) + + +def test_create_api_hub_instance_rest_flattened_error(transport: str = "rest"): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_api_hub_instance( + client.create_api_hub_instance( provisioning_service.CreateApiHubInstanceRequest(), parent="parent_value", api_hub_instance=common_fields.ApiHubInstance(name="name_value"), @@ -1518,6 +1335,12 @@ async def test_create_api_hub_instance_flattened_error_async(): ) +def test_create_api_hub_instance_rest_error(): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1525,34 +1348,38 @@ async def test_create_api_hub_instance_flattened_error_async(): dict, ], ) -def test_get_api_hub_instance(request_type, transport: str = "grpc"): +def test_get_api_hub_instance_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ApiHubInstance( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.ApiHubInstance( name="name_value", state=common_fields.ApiHubInstance.State.INACTIVE, state_message="state_message_value", description="description_value", ) - response = client.get_api_hub_instance(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = provisioning_service.GetApiHubInstanceRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ApiHubInstance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api_hub_instance(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.ApiHubInstance) @@ -1562,64 +1389,13 @@ def test_get_api_hub_instance(request_type, transport: str = "grpc"): assert response.description == "description_value" -def test_get_api_hub_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_api_hub_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.GetApiHubInstanceRequest() - - -def test_get_api_hub_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = provisioning_service.GetApiHubInstanceRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_api_hub_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.GetApiHubInstanceRequest( - name="name_value", - ) - - -def test_get_api_hub_instance_use_cached_wrapped_rpc(): +def test_get_api_hub_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1639,6 +1415,7 @@ def test_get_api_hub_instance_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_api_hub_instance ] = mock_rpc + request = {} client.get_api_hub_instance(request) @@ -1652,275 +1429,237 @@ def test_get_api_hub_instance_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_api_hub_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_get_api_hub_instance_rest_required_fields( + request_type=provisioning_service.GetApiHubInstanceRequest, +): + transport_class = transports.ProvisioningRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiHubInstance( - name="name_value", - state=common_fields.ApiHubInstance.State.INACTIVE, - state_message="state_message_value", - description="description_value", - ) - ) - response = await client.get_api_hub_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.GetApiHubInstanceRequest() - - -@pytest.mark.asyncio -async def test_get_api_hub_instance_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_api_hub_instance - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_api_hub_instance - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_api_hub_instance(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_api_hub_instance(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_api_hub_instance_async( - transport: str = "grpc_asyncio", - request_type=provisioning_service.GetApiHubInstanceRequest, -): - client = ProvisioningAsyncClient( + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.ApiHubInstance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiHubInstance( - name="name_value", - state=common_fields.ApiHubInstance.State.INACTIVE, - state_message="state_message_value", - description="description_value", - ) - ) - response = await client.get_api_hub_instance(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = provisioning_service.GetApiHubInstanceRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.ApiHubInstance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.ApiHubInstance) - assert response.name == "name_value" - assert response.state == common_fields.ApiHubInstance.State.INACTIVE - assert response.state_message == "state_message_value" - assert response.description == "description_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api_hub_instance(request) -@pytest.mark.asyncio -async def test_get_api_hub_instance_async_from_dict(): - await test_get_api_hub_instance_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_api_hub_instance_field_headers(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_api_hub_instance_rest_unset_required_fields(): + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning_service.GetApiHubInstanceRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - call.return_value = common_fields.ApiHubInstance() - client.get_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_api_hub_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_api_hub_instance_field_headers_async(): - client = ProvisioningAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_api_hub_instance_rest_interceptors(null_interceptor): + transport = transports.ProvisioningRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProvisioningRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning_service.GetApiHubInstanceRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ProvisioningClient(transport=transport) with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiHubInstance() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ProvisioningRestInterceptor, "post_get_api_hub_instance" + ) as post, mock.patch.object( + transports.ProvisioningRestInterceptor, "pre_get_api_hub_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = provisioning_service.GetApiHubInstanceRequest.pb( + provisioning_service.GetApiHubInstanceRequest() ) - await client.get_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ApiHubInstance.to_json( + common_fields.ApiHubInstance() + ) -def test_get_api_hub_instance_flattened(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = provisioning_service.GetApiHubInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ApiHubInstance() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ApiHubInstance() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_api_hub_instance( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_api_hub_instance_flattened_error(): +def test_get_api_hub_instance_rest_bad_request( + transport: str = "rest", request_type=provisioning_service.GetApiHubInstanceRequest +): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_api_hub_instance( - provisioning_service.GetApiHubInstanceRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_api_hub_instance(request) -@pytest.mark.asyncio -async def test_get_api_hub_instance_flattened_async(): - client = ProvisioningAsyncClient( +def test_get_api_hub_instance_rest_flattened(): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ApiHubInstance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ApiHubInstance() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiHubInstance() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_api_hub_instance( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ApiHubInstance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_api_hub_instance(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apiHubInstances/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_api_hub_instance_flattened_error_async(): - client = ProvisioningAsyncClient( +def test_get_api_hub_instance_rest_flattened_error(transport: str = "rest"): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_api_hub_instance( + client.get_api_hub_instance( provisioning_service.GetApiHubInstanceRequest(), name="name_value", ) +def test_get_api_hub_instance_rest_error(): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1928,92 +1667,45 @@ async def test_get_api_hub_instance_flattened_error_async(): dict, ], ) -def test_lookup_api_hub_instance(request_type, transport: str = "grpc"): +def test_lookup_api_hub_instance_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = provisioning_service.LookupApiHubInstanceResponse() - response = client.lookup_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = provisioning_service.LookupApiHubInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, provisioning_service.LookupApiHubInstanceResponse) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = provisioning_service.LookupApiHubInstanceResponse() -def test_lookup_api_hub_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = provisioning_service.LookupApiHubInstanceResponse.pb( + return_value ) - client.lookup_api_hub_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.LookupApiHubInstanceRequest() - - -def test_lookup_api_hub_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + json_return_value = json_format.MessageToJson(return_value) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = provisioning_service.LookupApiHubInstanceRequest( - parent="parent_value", - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.lookup_api_hub_instance(request) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.lookup_api_hub_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.LookupApiHubInstanceRequest( - parent="parent_value", - ) + # Establish that the response is the type that we expect. + assert isinstance(response, provisioning_service.LookupApiHubInstanceResponse) -def test_lookup_api_hub_instance_use_cached_wrapped_rpc(): +def test_lookup_api_hub_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2034,6 +1726,7 @@ def test_lookup_api_hub_instance_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.lookup_api_hub_instance ] = mock_rpc + request = {} client.lookup_api_hub_instance(request) @@ -2047,204 +1740,223 @@ def test_lookup_api_hub_instance_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - provisioning_service.LookupApiHubInstanceResponse() - ) - response = await client.lookup_api_hub_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.LookupApiHubInstanceRequest() - - -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_lookup_api_hub_instance_rest_required_fields( + request_type=provisioning_service.LookupApiHubInstanceRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ProvisioningRestTransport - # Ensure method has been cached - assert ( - client._client._transport.lookup_api_hub_instance - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.lookup_api_hub_instance - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.lookup_api_hub_instance(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the 
underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.lookup_api_hub_instance(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_async( - transport: str = "grpc_asyncio", - request_type=provisioning_service.LookupApiHubInstanceRequest, -): - client = ProvisioningAsyncClient( + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = provisioning_service.LookupApiHubInstanceResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - provisioning_service.LookupApiHubInstanceResponse() - ) - response = await client.lookup_api_hub_instance(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = provisioning_service.LookupApiHubInstanceRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = provisioning_service.LookupApiHubInstanceResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, provisioning_service.LookupApiHubInstanceResponse) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.lookup_api_hub_instance(request) -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_async_from_dict(): - await test_lookup_api_hub_instance_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_lookup_api_hub_instance_field_headers(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_lookup_api_hub_instance_rest_unset_required_fields(): + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning_service.LookupApiHubInstanceRequest() + unset_fields = transport.lookup_api_hub_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) - request.parent = "parent_value" - # Mock the actual call within the gRPC stub, and fake the request. +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_lookup_api_hub_instance_rest_interceptors(null_interceptor): + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProvisioningRestInterceptor(), + ) + client = ProvisioningClient(transport=transport) with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - call.return_value = provisioning_service.LookupApiHubInstanceResponse() - client.lookup_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ProvisioningRestInterceptor, "post_lookup_api_hub_instance" + ) as post, mock.patch.object( + transports.ProvisioningRestInterceptor, "pre_lookup_api_hub_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = provisioning_service.LookupApiHubInstanceRequest.pb( + provisioning_service.LookupApiHubInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + provisioning_service.LookupApiHubInstanceResponse.to_json( + provisioning_service.LookupApiHubInstanceResponse() + ) + ) + request = provisioning_service.LookupApiHubInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = provisioning_service.LookupApiHubInstanceResponse() -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_field_headers_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client.lookup_api_hub_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = provisioning_service.LookupApiHubInstanceRequest() + pre.assert_called_once() + post.assert_called_once() - request.parent = "parent_value" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - provisioning_service.LookupApiHubInstanceResponse() - ) - await client.lookup_api_hub_instance(request) +def test_lookup_api_hub_instance_rest_bad_request( + transport: str = "rest", + request_type=provisioning_service.LookupApiHubInstanceRequest, +): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.lookup_api_hub_instance(request) -def test_lookup_api_hub_instance_flattened(): +def test_lookup_api_hub_instance_rest_flattened(): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = provisioning_service.LookupApiHubInstanceResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.lookup_api_hub_instance( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = provisioning_service.LookupApiHubInstanceResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = provisioning_service.LookupApiHubInstanceResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.lookup_api_hub_instance(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/apiHubInstances:lookup" + % client.transport._host, + args[1], + ) -def test_lookup_api_hub_instance_flattened_error(): +def test_lookup_api_hub_instance_rest_flattened_error(transport: str = "rest"): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -2256,3044 +1968,798 @@ def test_lookup_api_hub_instance_flattened_error(): ) -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_flattened_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = provisioning_service.LookupApiHubInstanceResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - provisioning_service.LookupApiHubInstanceResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.lookup_api_hub_instance( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val +def test_lookup_api_hub_instance_rest_error(): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_flattened_error_async(): - client = ProvisioningAsyncClient( +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ProvisioningRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. with pytest.raises(ValueError): - await client.lookup_api_hub_instance( - provisioning_service.LookupApiHubInstanceRequest(), - parent="parent_value", + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - -@pytest.mark.parametrize( - "request_type", - [ - provisioning_service.CreateApiHubInstanceRequest, - dict, - ], -) -def test_create_api_hub_instance_rest(request_type): - client = ProvisioningClient( + # It is an error to provide a credentials file and a transport instance. + transport = transports.ProvisioningRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["api_hub_instance"] = { - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "state": 1, - "state_message": "state_message_value", - "config": {"cmek_key_name": "cmek_key_name_value"}, - "labels": {}, - "description": "description_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = provisioning_service.CreateApiHubInstanceRequest.meta.fields[ - "api_hub_instance" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["api_hub_instance"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the 
runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["api_hub_instance"][field])): - del request_init["api_hub_instance"][field][i][subfield] - else: - del request_init["api_hub_instance"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_api_hub_instance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_create_api_hub_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + with pytest.raises(ValueError): client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + client_options={"credentials_file": "credentials.json"}, + transport=transport, ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.create_api_hub_instance - in client._transport._wrapped_methods + # It is an error to provide an api_key and a transport instance. + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProvisioningClient( + client_options=options, + transport=transport, ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProvisioningClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) - client._transport._wrapped_methods[ - client._transport.create_api_hub_instance - ] = mock_rpc - - request = {} - client.create_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_api_hub_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_api_hub_instance_rest_required_fields( - request_type=provisioning_service.CreateApiHubInstanceRequest, -): - transport_class = transports.ProvisioningRestTransport - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) + # It is an error to provide scopes and a transport instance. + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials(), ) + with pytest.raises(ValueError): + client = ProvisioningClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_api_hub_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_api_hub_instance._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("api_hub_instance_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - client = ProvisioningClient( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ProvisioningRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_api_hub_instance(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_api_hub_instance_rest_unset_required_fields(): - transport = transports.ProvisioningRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_api_hub_instance._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("apiHubInstanceId",)) - & set( - ( - "parent", - "apiHubInstance", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_api_hub_instance_rest_interceptors(null_interceptor): - transport = transports.ProvisioningRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ProvisioningRestInterceptor(), - ) - client = ProvisioningClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ProvisioningRestInterceptor, "post_create_api_hub_instance" - ) as post, mock.patch.object( - transports.ProvisioningRestInterceptor, "pre_create_api_hub_instance" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = provisioning_service.CreateApiHubInstanceRequest.pb( - 
provisioning_service.CreateApiHubInstanceRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = provisioning_service.CreateApiHubInstanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_api_hub_instance( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_api_hub_instance_rest_bad_request( - transport: str = "rest", - request_type=provisioning_service.CreateApiHubInstanceRequest, -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_api_hub_instance(request) - - -def test_create_api_hub_instance_rest_flattened(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - api_hub_instance=common_fields.ApiHubInstance(name="name_value"), - api_hub_instance_id="api_hub_instance_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_api_hub_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/apiHubInstances" - % client.transport._host, - args[1], - ) - - -def test_create_api_hub_instance_rest_flattened_error(transport: str = "rest"): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_api_hub_instance( - provisioning_service.CreateApiHubInstanceRequest(), - parent="parent_value", - api_hub_instance=common_fields.ApiHubInstance(name="name_value"), - api_hub_instance_id="api_hub_instance_id_value", - ) - - -def test_create_api_hub_instance_rest_error(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - provisioning_service.GetApiHubInstanceRequest, - dict, - ], -) -def test_get_api_hub_instance_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ApiHubInstance( - name="name_value", - state=common_fields.ApiHubInstance.State.INACTIVE, - state_message="state_message_value", - description="description_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ApiHubInstance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_api_hub_instance(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.ApiHubInstance) - assert response.name == "name_value" - assert response.state == common_fields.ApiHubInstance.State.INACTIVE - assert response.state_message == "state_message_value" - assert response.description == "description_value" - - -def test_get_api_hub_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.get_api_hub_instance in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_api_hub_instance - ] = mock_rpc - - request = {} - client.get_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_api_hub_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_api_hub_instance_rest_required_fields( - request_type=provisioning_service.GetApiHubInstanceRequest, -): - transport_class = transports.ProvisioningRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_api_hub_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_api_hub_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.ApiHubInstance() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.ApiHubInstance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_api_hub_instance(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_api_hub_instance_rest_unset_required_fields(): - transport = transports.ProvisioningRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_api_hub_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_api_hub_instance_rest_interceptors(null_interceptor): - transport = transports.ProvisioningRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ProvisioningRestInterceptor(), - ) - client = ProvisioningClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ProvisioningRestInterceptor, "post_get_api_hub_instance" - ) as post, mock.patch.object( - transports.ProvisioningRestInterceptor, "pre_get_api_hub_instance" - ) as pre: - pre.assert_not_called() - 
post.assert_not_called() - pb_message = provisioning_service.GetApiHubInstanceRequest.pb( - provisioning_service.GetApiHubInstanceRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.ApiHubInstance.to_json( - common_fields.ApiHubInstance() - ) - - request = provisioning_service.GetApiHubInstanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.ApiHubInstance() - - client.get_api_hub_instance( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_api_hub_instance_rest_bad_request( - transport: str = "rest", request_type=provisioning_service.GetApiHubInstanceRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_api_hub_instance(request) - - -def test_get_api_hub_instance_rest_flattened(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ApiHubInstance() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ApiHubInstance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_api_hub_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apiHubInstances/*}" - % client.transport._host, - args[1], - ) - - -def test_get_api_hub_instance_rest_flattened_error(transport: str = "rest"): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_api_hub_instance( - provisioning_service.GetApiHubInstanceRequest(), - name="name_value", - ) - - -def test_get_api_hub_instance_rest_error(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - provisioning_service.LookupApiHubInstanceRequest, - dict, - ], -) -def test_lookup_api_hub_instance_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = provisioning_service.LookupApiHubInstanceResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = provisioning_service.LookupApiHubInstanceResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.lookup_api_hub_instance(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, provisioning_service.LookupApiHubInstanceResponse) - - -def test_lookup_api_hub_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.lookup_api_hub_instance - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.lookup_api_hub_instance - ] = mock_rpc - - request = {} - client.lookup_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.lookup_api_hub_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_lookup_api_hub_instance_rest_required_fields( - request_type=provisioning_service.LookupApiHubInstanceRequest, -): - transport_class = transports.ProvisioningRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).lookup_api_hub_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).lookup_api_hub_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = provisioning_service.LookupApiHubInstanceResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = provisioning_service.LookupApiHubInstanceResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.lookup_api_hub_instance(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_lookup_api_hub_instance_rest_unset_required_fields(): - transport = transports.ProvisioningRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.lookup_api_hub_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_lookup_api_hub_instance_rest_interceptors(null_interceptor): - transport = transports.ProvisioningRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ProvisioningRestInterceptor(), - ) - client = ProvisioningClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ProvisioningRestInterceptor, "post_lookup_api_hub_instance" - ) as post, mock.patch.object( - transports.ProvisioningRestInterceptor, "pre_lookup_api_hub_instance" - ) as pre: - 
pre.assert_not_called() - post.assert_not_called() - pb_message = provisioning_service.LookupApiHubInstanceRequest.pb( - provisioning_service.LookupApiHubInstanceRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - provisioning_service.LookupApiHubInstanceResponse.to_json( - provisioning_service.LookupApiHubInstanceResponse() - ) - ) - - request = provisioning_service.LookupApiHubInstanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = provisioning_service.LookupApiHubInstanceResponse() - - client.lookup_api_hub_instance( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_lookup_api_hub_instance_rest_bad_request( - transport: str = "rest", - request_type=provisioning_service.LookupApiHubInstanceRequest, -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.lookup_api_hub_instance(request) - - -def test_lookup_api_hub_instance_rest_flattened(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = provisioning_service.LookupApiHubInstanceResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = provisioning_service.LookupApiHubInstanceResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.lookup_api_hub_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/apiHubInstances:lookup" - % client.transport._host, - args[1], - ) - - -def test_lookup_api_hub_instance_rest_flattened_error(transport: str = "rest"): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.lookup_api_hub_instance( - provisioning_service.LookupApiHubInstanceRequest(), - parent="parent_value", - ) - - -def test_lookup_api_hub_instance_rest_error(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ProvisioningGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ProvisioningGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ProvisioningClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ProvisioningGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ProvisioningClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. 
- options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ProvisioningClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ProvisioningGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ProvisioningClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ProvisioningGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ProvisioningClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ProvisioningGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ProvisioningGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ProvisioningGrpcTransport, - transports.ProvisioningGrpcAsyncIOTransport, - transports.ProvisioningRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = ProvisioningClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ProvisioningGrpcTransport, - ) - - -def test_provisioning_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ProvisioningTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_provisioning_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.ProvisioningTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "create_api_hub_instance", - "get_api_hub_instance", - "lookup_api_hub_instance", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_provisioning_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ProvisioningTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_provisioning_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ProvisioningTransport() - adc.assert_called_once() - - -def test_provisioning_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ProvisioningClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ProvisioningGrpcTransport, - transports.ProvisioningGrpcAsyncIOTransport, - ], -) -def test_provisioning_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ProvisioningGrpcTransport, - transports.ProvisioningGrpcAsyncIOTransport, - transports.ProvisioningRestTransport, - ], -) -def test_provisioning_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ProvisioningGrpcTransport, grpc_helpers), - (transports.ProvisioningGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_provisioning_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [transports.ProvisioningGrpcTransport, transports.ProvisioningGrpcAsyncIOTransport], -) -def test_provisioning_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_provisioning_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.ProvisioningRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -def test_provisioning_rest_lro_client(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. 
- assert transport.operations_client is transport.operations_client - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_provisioning_host_no_port(transport_name): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_provisioning_host_with_port(transport_name): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com:8000" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_provisioning_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = ProvisioningClient( - credentials=creds1, - transport=transport_name, - ) - client2 = ProvisioningClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_api_hub_instance._session - session2 = client2.transport.create_api_hub_instance._session - assert session1 != session2 - session1 = client1.transport.get_api_hub_instance._session - session2 = client2.transport.get_api_hub_instance._session - assert session1 != session2 - session1 = client1.transport.lookup_api_hub_instance._session - session2 = client2.transport.lookup_api_hub_instance._session - assert session1 != 
session2 - - -def test_provisioning_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ProvisioningGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_provisioning_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ProvisioningGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [transports.ProvisioningGrpcTransport, transports.ProvisioningGrpcAsyncIOTransport], -) -def test_provisioning_transport_channel_mtls_with_client_cert_source(transport_class): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [transports.ProvisioningGrpcTransport, transports.ProvisioningGrpcAsyncIOTransport], -) -def test_provisioning_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_provisioning_grpc_lro_client(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_provisioning_grpc_lro_async_client(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. 
- assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_api_hub_instance_path(): - project = "squid" - location = "clam" - api_hub_instance = "whelk" - expected = "projects/{project}/locations/{location}/apiHubInstances/{api_hub_instance}".format( - project=project, - location=location, - api_hub_instance=api_hub_instance, - ) - actual = ProvisioningClient.api_hub_instance_path( - project, location, api_hub_instance - ) - assert expected == actual - - -def test_parse_api_hub_instance_path(): - expected = { - "project": "octopus", - "location": "oyster", - "api_hub_instance": "nudibranch", - } - path = ProvisioningClient.api_hub_instance_path(**expected) - - # Check that the path construction is reversible. - actual = ProvisioningClient.parse_api_hub_instance_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = ProvisioningClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = ProvisioningClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = ProvisioningClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = ProvisioningClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = ProvisioningClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = ProvisioningClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = ProvisioningClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = ProvisioningClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ProvisioningClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format( - project=project, - ) - actual = ProvisioningClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = ProvisioningClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = ProvisioningClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = ProvisioningClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = ProvisioningClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = ProvisioningClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.ProvisioningTransport, "_prep_wrapped_messages" - ) as prep: - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.ProvisioningTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = ProvisioningClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = 
request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request ) + client = ProvisioningClient(transport=transport) + assert client.transport is transport - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProvisioningRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() @pytest.mark.parametrize( - "request_type", + "transport_name", [ - operations_pb2.ListOperationsRequest, - dict, + "rest", ], ) -def test_list_operations_rest(request_type): - client = ProvisioningClient( +def test_transport_kind(transport_name): + transport = ProvisioningClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + assert transport.kind == transport_name - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_operations(request) +def test_provisioning_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ProvisioningTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_provisioning_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ProvisioningTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_delete_operation(transport: str = "grpc"): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_api_hub_instance", + "get_api_hub_instance", + "lookup_api_hub_instance", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - + with pytest.raises(NotImplementedError): + transport.close() -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. 
- assert response is None +def test_provisioning_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProvisioningTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) -def test_delete_operation_field_headers(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - ) +def test_provisioning_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProvisioningTransport() + adc.assert_called_once() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None +def test_provisioning_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ProvisioningClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_provisioning_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ProvisioningRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = ProvisioningAsyncClient( +def test_provisioning_rest_lro_client(): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + transport = client.transport - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client -def test_delete_operation_from_dict(): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_provisioning_host_no_port(transport_name): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -def test_cancel_operation(transport: str = "grpc"): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_provisioning_host_with_port(transport_name): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_provisioning_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ProvisioningClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ProvisioningClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_api_hub_instance._session + session2 = client2.transport.create_api_hub_instance._session + assert session1 != session2 + session1 = client1.transport.get_api_hub_instance._session + session2 = client2.transport.get_api_hub_instance._session + assert session1 != session2 + session1 = client1.transport.lookup_api_hub_instance._session + session2 = client2.transport.lookup_api_hub_instance._session + assert session1 != session2 -def test_cancel_operation_field_headers(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_api_hub_instance_path(): + project = "squid" + location = "clam" + api_hub_instance = "whelk" + expected = "projects/{project}/locations/{location}/apiHubInstances/{api_hub_instance}".format( + project=project, + location=location, + api_hub_instance=api_hub_instance, ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + actual = ProvisioningClient.api_hub_instance_path( + project, location, api_hub_instance ) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_api_hub_instance_path(): + expected = { + "project": "octopus", + "location": "oyster", + "api_hub_instance": "nudibranch", + } + path = ProvisioningClient.api_hub_instance_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. 
+ actual = ProvisioningClient.parse_api_hub_instance_path(path) + assert expected == actual -def test_cancel_operation_from_dict(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + actual = ProvisioningClient.common_billing_account_path(billing_account) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = ProvisioningClient.common_billing_account_path(**expected) + # Check that the path construction is reversible. 
+ actual = ProvisioningClient.parse_common_billing_account_path(path) + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, ) + actual = ProvisioningClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = ProvisioningClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = ProvisioningClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = ProvisioningClient.common_organization_path(organization) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = ProvisioningClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = ProvisioningClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, ) + actual = ProvisioningClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = ProvisioningClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ProvisioningClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = ProvisioningClient.common_location_path(project, location) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = ProvisioningClient.common_location_path(**expected) + # Check that the path construction is reversible. + actual = ProvisioningClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() + with mock.patch.object( + transports.ProvisioningTransport, "_prep_wrapped_messages" + ) as prep: + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - response = await client.get_operation( - request={ - "name": "locations", - } + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ProvisioningTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ProvisioningClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = ProvisioningAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. 
+ assert response is None -def test_list_locations_from_dict(): +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_get_location(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_operation(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, locations_pb2.Location) + assert response is None -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = ProvisioningAsyncClient( +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -def test_get_location_field_headers(): - client = ProvisioningClient(credentials=ga_credentials.AnonymousCredentials()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + response = client.get_operation(request) + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = ProvisioningAsyncClient(credentials=ga_credentials.AnonymousCredentials()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -5311,7 +2777,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = ProvisioningClient( @@ -5328,8 +2793,7 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (ProvisioningClient, transports.ProvisioningGrpcTransport), - (ProvisioningAsyncClient, transports.ProvisioningGrpcAsyncIOTransport), + (ProvisioningClient, transports.ProvisioningRestTransport), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py index f56c0e63d5f7..836aa804eb51 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py @@ -47,7 +47,6 @@ from requests.sessions import Session from google.cloud.apihub_v1.services.runtime_project_attachment_service import ( - RuntimeProjectAttachmentServiceAsyncClient, RuntimeProjectAttachmentServiceClient, pagers, transports, @@ -234,11 +233,6 @@ def test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(RuntimeProjectAttachmentServiceClient), ) -@mock.patch.object( - RuntimeProjectAttachmentServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(RuntimeProjectAttachmentServiceAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -337,11 +331,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( 
"client_class,transport_class,transport_name", [ - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - ), ( RuntimeProjectAttachmentServiceClient, transports.RuntimeProjectAttachmentServiceRestTransport, @@ -425,8 +414,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (RuntimeProjectAttachmentServiceClient, "grpc"), - (RuntimeProjectAttachmentServiceAsyncClient, "grpc_asyncio"), (RuntimeProjectAttachmentServiceClient, "rest"), ], ) @@ -453,11 +440,6 @@ def test_runtime_project_attachment_service_client_from_service_account_info( @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.RuntimeProjectAttachmentServiceGrpcTransport, "grpc"), - ( - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), (transports.RuntimeProjectAttachmentServiceRestTransport, "rest"), ], ) @@ -482,8 +464,6 @@ def test_runtime_project_attachment_service_client_service_account_always_use_jw @pytest.mark.parametrize( "client_class,transport_name", [ - (RuntimeProjectAttachmentServiceClient, "grpc"), - (RuntimeProjectAttachmentServiceAsyncClient, "grpc_asyncio"), (RuntimeProjectAttachmentServiceClient, "rest"), ], ) @@ -517,28 +497,17 @@ def test_runtime_project_attachment_service_client_from_service_account_file( def test_runtime_project_attachment_service_client_get_transport_class(): transport = RuntimeProjectAttachmentServiceClient.get_transport_class() available_transports = [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, transports.RuntimeProjectAttachmentServiceRestTransport, ] assert transport in available_transports - transport = RuntimeProjectAttachmentServiceClient.get_transport_class("grpc") - assert transport == transports.RuntimeProjectAttachmentServiceGrpcTransport + transport = RuntimeProjectAttachmentServiceClient.get_transport_class("rest") + assert 
transport == transports.RuntimeProjectAttachmentServiceRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), ( RuntimeProjectAttachmentServiceClient, transports.RuntimeProjectAttachmentServiceRestTransport, @@ -551,11 +520,6 @@ def test_runtime_project_attachment_service_client_get_transport_class(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(RuntimeProjectAttachmentServiceClient), ) -@mock.patch.object( - RuntimeProjectAttachmentServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(RuntimeProjectAttachmentServiceAsyncClient), -) def test_runtime_project_attachment_service_client_client_options( client_class, transport_class, transport_name ): @@ -693,30 +657,6 @@ def test_runtime_project_attachment_service_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - "true", - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - "false", - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), ( RuntimeProjectAttachmentServiceClient, transports.RuntimeProjectAttachmentServiceRestTransport, @@ -736,11 +676,6 @@ def test_runtime_project_attachment_service_client_client_options( "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(RuntimeProjectAttachmentServiceClient), ) -@mock.patch.object( - RuntimeProjectAttachmentServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(RuntimeProjectAttachmentServiceAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_runtime_project_attachment_service_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ -844,20 +779,12 @@ def test_runtime_project_attachment_service_client_mtls_env_auto( ) -@pytest.mark.parametrize( - "client_class", - [RuntimeProjectAttachmentServiceClient, RuntimeProjectAttachmentServiceAsyncClient], -) +@pytest.mark.parametrize("client_class", [RuntimeProjectAttachmentServiceClient]) @mock.patch.object( RuntimeProjectAttachmentServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RuntimeProjectAttachmentServiceClient), ) -@mock.patch.object( - RuntimeProjectAttachmentServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(RuntimeProjectAttachmentServiceAsyncClient), -) def test_runtime_project_attachment_service_client_get_mtls_endpoint_and_cert_source( client_class, ): @@ -951,20 +878,12 @@ def test_runtime_project_attachment_service_client_get_mtls_endpoint_and_cert_so ) -@pytest.mark.parametrize( - "client_class", - [RuntimeProjectAttachmentServiceClient, RuntimeProjectAttachmentServiceAsyncClient], -) +@pytest.mark.parametrize("client_class", [RuntimeProjectAttachmentServiceClient]) @mock.patch.object( RuntimeProjectAttachmentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(RuntimeProjectAttachmentServiceClient), ) -@mock.patch.object( - RuntimeProjectAttachmentServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(RuntimeProjectAttachmentServiceAsyncClient), -) def test_runtime_project_attachment_service_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback 
api_override = "foo.com" @@ -1045,16 +964,6 @@ def test_runtime_project_attachment_service_client_client_api_endpoint(client_cl @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), ( RuntimeProjectAttachmentServiceClient, transports.RuntimeProjectAttachmentServiceRestTransport, @@ -1090,18 +999,6 @@ def test_runtime_project_attachment_service_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), ( RuntimeProjectAttachmentServiceClient, transports.RuntimeProjectAttachmentServiceRestTransport, @@ -1134,96 +1031,6 @@ def test_runtime_project_attachment_service_client_client_options_credentials_fi ) -def test_runtime_project_attachment_service_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = RuntimeProjectAttachmentServiceClient( - client_options={"api_endpoint": "squid.clam.whelk"} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - 
"client_class,transport_class,transport_name,grpc_helpers", - [ - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_runtime_project_attachment_service_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1231,34 +1038,114 @@ def test_runtime_project_attachment_service_client_create_channel_credentials_fi dict, ], ) -def test_create_runtime_project_attachment(request_type, transport: str = "grpc"): +def test_create_runtime_project_attachment_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["runtime_project_attachment"] = { + "name": "name_value", + "runtime_project": "runtime_project_value", + "create_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = runtime_project_attachment_service.RuntimeProjectAttachment( + # Determine if the message type is proto-plus or protobuf + test_field = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest.meta.fields[ + "runtime_project_attachment" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "runtime_project_attachment" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if 
isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["runtime_project_attachment"][field]) + ): + del request_init["runtime_project_attachment"][field][i][subfield] + else: + del request_init["runtime_project_attachment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment( name="name_value", runtime_project="runtime_project_value", ) - response = client.create_runtime_project_attachment(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value ) - assert args[0] == request + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_runtime_project_attachment(request) # Establish that the response is the type that we expect. assert isinstance( @@ -1268,71 +1155,13 @@ def test_create_runtime_project_attachment(request_type, transport: str = "grpc" assert response.runtime_project == "runtime_project_value" -def test_create_runtime_project_attachment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - ) - - -def test_create_runtime_project_attachment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest( - parent="parent_value", - runtime_project_attachment_id="runtime_project_attachment_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_runtime_project_attachment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest( - parent="parent_value", - runtime_project_attachment_id="runtime_project_attachment_id_value", - ) - - -def test_create_runtime_project_attachment_use_cached_wrapped_rpc(): +def test_create_runtime_project_attachment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1353,6 +1182,7 @@ def test_create_runtime_project_attachment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_runtime_project_attachment ] = mock_rpc + request = {} client.create_runtime_project_attachment(request) @@ -1366,305 +1196,278 @@ def test_create_runtime_project_attachment_use_cached_wrapped_rpc(): assert 
mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value", - runtime_project="runtime_project_value", - ) - ) - response = await client.create_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - ) - - -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_create_runtime_project_attachment_rest_required_fields( + request_type=runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - # Ensure method has been cached - assert ( - client._client._transport.create_runtime_project_attachment - in 
client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request_init["runtime_project_attachment_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_runtime_project_attachment - ] = mock_rpc + # verify fields with default values are dropped + assert "runtimeProjectAttachmentId" not in jsonified_request - request = {} - await client.create_runtime_project_attachment(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present + assert "runtimeProjectAttachmentId" in jsonified_request + assert ( + jsonified_request["runtimeProjectAttachmentId"] + == request_init["runtime_project_attachment_id"] + ) - await client.create_runtime_project_attachment(request) + jsonified_request["parent"] = "parent_value" + jsonified_request[ + "runtimeProjectAttachmentId" + ] = "runtime_project_attachment_id_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_runtime_project_attachment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("runtime_project_attachment_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "runtimeProjectAttachmentId" in jsonified_request + assert ( + jsonified_request["runtimeProjectAttachmentId"] + == "runtime_project_attachment_id_value" + ) -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_async( - transport: str = "grpc_asyncio", - request_type=runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, -): - client = RuntimeProjectAttachmentServiceAsyncClient( + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value", - runtime_project="runtime_project_value", - ) - ) - response = await client.create_runtime_project_attachment(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - ) - assert args[0] == request + # Convert return value to protobuf type + return_value = ( + runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance( - response, runtime_project_attachment_service.RuntimeProjectAttachment - ) - assert response.name == "name_value" - assert response.runtime_project == "runtime_project_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_runtime_project_attachment(request) -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_async_from_dict(): - await test_create_runtime_project_attachment_async(request_type=dict) + expected_params = [ + ( + "runtimeProjectAttachmentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_runtime_project_attachment_field_headers(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_runtime_project_attachment_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - call.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() + unset_fields = ( + transport.create_runtime_project_attachment._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("runtimeProjectAttachmentId",)) + & set( + ( + "parent", + "runtimeProjectAttachmentId", + "runtimeProjectAttachment", + ) ) - client.create_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + ) -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_runtime_project_attachment_rest_interceptors(null_interceptor): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), ) + client = RuntimeProjectAttachmentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_create_runtime_project_attachment", + ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_create_runtime_project_attachment", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest.pb( + runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - - request.parent = "parent_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + runtime_project_attachment_service.RuntimeProjectAttachment.to_json( + runtime_project_attachment_service.RuntimeProjectAttachment() + ) + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + request = ( + runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( runtime_project_attachment_service.RuntimeProjectAttachment() ) - await client.create_runtime_project_attachment(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.create_runtime_project_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -def test_create_runtime_project_attachment_flattened(): +def test_create_runtime_project_attachment_rest_bad_request( + transport: str = "rest", + request_type=runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_runtime_project_attachment( - parent="parent_value", - runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value" - ), - runtime_project_attachment_id="runtime_project_attachment_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].runtime_project_attachment - mock_val = runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value" - ) - assert arg == mock_val - arg = args[0].runtime_project_attachment_id - mock_val = "runtime_project_attachment_id_value" - assert arg == mock_val - - -def test_create_runtime_project_attachment_flattened_error(): + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_runtime_project_attachment(request) + + +def test_create_runtime_project_attachment_rest_flattened(): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_runtime_project_attachment( - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest(), + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( name="name_value" ), runtime_project_attachment_id="runtime_project_attachment_id_value", ) + mock_args.update(sample_request) - -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_flattened_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_runtime_project_attachment( - parent="parent_value", - runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value" - ), - runtime_project_attachment_id="runtime_project_attachment_id_value", - ) + client.create_runtime_project_attachment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].runtime_project_attachment - mock_val = runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value" - ) - assert arg == mock_val - arg = args[0].runtime_project_attachment_id - mock_val = "runtime_project_attachment_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_flattened_error_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/runtimeProjectAttachments" + % client.transport._host, + args[1], + ) + + +def test_create_runtime_project_attachment_rest_flattened_error( + transport: str = "rest", +): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_runtime_project_attachment( + client.create_runtime_project_attachment( runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest(), parent="parent_value", runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( @@ -1674,6 +1477,12 @@ async def test_create_runtime_project_attachment_flattened_error_async(): ) +def test_create_runtime_project_attachment_rest_error(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1681,34 +1490,38 @@ async def test_create_runtime_project_attachment_flattened_error_async(): dict, ], ) -def test_get_runtime_project_attachment(request_type, transport: str = "grpc"): +def test_get_runtime_project_attachment_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = runtime_project_attachment_service.RuntimeProjectAttachment( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = runtime_project_attachment_service.RuntimeProjectAttachment( name="name_value", runtime_project="runtime_project_value", ) - response = client.get_runtime_project_attachment(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value ) - assert args[0] == request + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_runtime_project_attachment(request) # Establish that the response is the type that we expect. assert isinstance( @@ -1718,69 +1531,13 @@ def test_get_runtime_project_attachment(request_type, transport: str = "grpc"): assert response.runtime_project == "runtime_project_value" -def test_get_runtime_project_attachment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - ) - - -def test_get_runtime_project_attachment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_runtime_project_attachment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest( - name="name_value", - ) - - -def test_get_runtime_project_attachment_use_cached_wrapped_rpc(): +def test_get_runtime_project_attachment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1801,6 +1558,7 @@ def test_get_runtime_project_attachment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_runtime_project_attachment ] = mock_rpc + request = {} client.get_runtime_project_attachment(request) @@ -1814,282 +1572,256 @@ def test_get_runtime_project_attachment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value", - runtime_project="runtime_project_value", - ) - ) - response = await client.get_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - ) - - -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_runtime_project_attachment_rest_required_fields( + request_type=runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_runtime_project_attachment - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_runtime_project_attachment - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_runtime_project_attachment(request) + unset_fields = 
transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_runtime_project_attachment(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_async( - transport: str = "grpc_asyncio", - request_type=runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, -): - client = RuntimeProjectAttachmentServiceAsyncClient( + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value", - runtime_project="runtime_project_value", - ) - ) - response = await client.get_runtime_project_attachment(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - ) - assert args[0] == request + # Convert return value to protobuf type + return_value = ( + runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance( - response, runtime_project_attachment_service.RuntimeProjectAttachment - ) - assert response.name == "name_value" - assert response.runtime_project == "runtime_project_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_runtime_project_attachment(request) -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_async_from_dict(): - await test_get_runtime_project_attachment_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_runtime_project_attachment_field_headers(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_runtime_project_attachment_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - call.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - client.get_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_runtime_project_attachment._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_runtime_project_attachment_rest_interceptors(null_interceptor): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
+ client = RuntimeProjectAttachmentServiceClient(transport=transport) with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_get_runtime_project_attachment", + ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_get_runtime_project_attachment", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest.pb( + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() + ) ) - await client.get_runtime_project_attachment(request) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_runtime_project_attachment_flattened(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + runtime_project_attachment_service.RuntimeProjectAttachment.to_json( + runtime_project_attachment_service.RuntimeProjectAttachment() + ) + ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + request = ( + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( runtime_project_attachment_service.RuntimeProjectAttachment() ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. + client.get_runtime_project_attachment( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_runtime_project_attachment_flattened_error(): +def test_get_runtime_project_attachment_rest_bad_request( + transport: str = "rest", + request_type=runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_runtime_project_attachment( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_runtime_project_attachment(request) -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_flattened_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_get_runtime_project_attachment_rest_flattened(): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_runtime_project_attachment( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_runtime_project_attachment(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/runtimeProjectAttachments/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_flattened_error_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_get_runtime_project_attachment_rest_flattened_error(transport: str = "rest"): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_runtime_project_attachment( + client.get_runtime_project_attachment( runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest(), name="name_value", ) +def test_get_runtime_project_attachment_rest_error(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -2097,110 +1829,52 @@ async def test_get_runtime_project_attachment_flattened_error_async(): dict, ], ) -def test_list_runtime_project_attachments(request_type, transport: str = "grpc"): +def test_list_runtime_project_attachments_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( next_page_token="next_page_token_value", ) ) - response = client.list_runtime_project_attachments(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( + return_value + ) ) - assert args[0] == request + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_runtime_project_attachments(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListRuntimeProjectAttachmentsPager) assert response.next_page_token == "next_page_token_value" -def test_list_runtime_project_attachments_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_runtime_project_attachments() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() - ) - - -def test_list_runtime_project_attachments_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_runtime_project_attachments(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", - ) - - -def test_list_runtime_project_attachments_use_cached_wrapped_rpc(): +def test_list_runtime_project_attachments_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2221,6 +1895,7 @@ def test_list_runtime_project_attachments_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.list_runtime_project_attachments ] = mock_rpc + request = {} client.list_runtime_project_attachments(request) @@ -2234,289 +1909,277 @@ def 
test_list_runtime_project_attachments_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_runtime_project_attachments() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() - ) - - -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_list_runtime_project_attachments_rest_required_fields( + request_type=runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - # Ensure method has been cached - assert ( - 
client._client._transport.list_runtime_project_attachments - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_runtime_project_attachments - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.list_runtime_project_attachments(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_runtime_project_attachments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.list_runtime_project_attachments(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_runtime_project_attachments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_async( - transport: str = "grpc_asyncio", - request_type=runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, -): - client = RuntimeProjectAttachmentServiceAsyncClient( + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_runtime_project_attachments(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() - ) - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRuntimeProjectAttachmentsAsyncPager) - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_async_from_dict(): - await test_list_runtime_project_attachments_async(request_type=dict) - - -def test_list_runtime_project_attachments_field_headers(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + # Designate an appropriate value for the returned response. + return_value = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() + response_value = Response() + response_value.status_code = 200 - request.parent = "parent_value" + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - call.return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() - ) - client.list_runtime_project_attachments(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.list_runtime_project_attachments(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_runtime_project_attachments_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() + unset_fields = ( + transport.list_runtime_project_attachments._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) ) - await client.list_runtime_project_attachments(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + & set(("parent",)) + ) -def test_list_runtime_project_attachments_flattened(): - client = RuntimeProjectAttachmentServiceClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_runtime_project_attachments_rest_interceptors(null_interceptor): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), ) - - # Mock the actual call within the gRPC stub, and fake the request. + client = RuntimeProjectAttachmentServiceClient(transport=transport) with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = ( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_list_runtime_project_attachments", + ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_list_runtime_project_attachments", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest.pb( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.to_json( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() + ) + + request = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. + client.list_runtime_project_attachments( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_runtime_project_attachments_flattened_error(): +def test_list_runtime_project_attachments_rest_bad_request( + transport: str = "rest", + request_type=runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_runtime_project_attachments( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_runtime_project_attachments(request) -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_flattened_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_list_runtime_project_attachments_rest_flattened(): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() ) - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_runtime_project_attachments( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_runtime_project_attachments(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/runtimeProjectAttachments" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_flattened_error_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_list_runtime_project_attachments_rest_flattened_error(transport: str = "rest"): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_runtime_project_attachments( + client.list_runtime_project_attachments( runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest(), parent="parent_value", ) -def test_list_runtime_project_attachments_pager(transport_name: str = "grpc"): +def test_list_runtime_project_attachments_rest_pager(transport: str = "rest"): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( runtime_project_attachments=[ runtime_project_attachment_service.RuntimeProjectAttachment(), @@ -2541,22 +2204,26 @@ def test_list_runtime_project_attachments_pager(transport_name: str = "grpc"): runtime_project_attachment_service.RuntimeProjectAttachment(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_runtime_project_attachments( - request={}, retry=retry, timeout=timeout + # Wrap the values into proper Response objs + response = tuple( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.to_json( + x + ) + for x in response ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.list_runtime_project_attachments(request=sample_request) results = list(pager) assert len(results) == 6 @@ -2565,152 +2232,9 @@ def test_list_runtime_project_attachments_pager(transport_name: str = "grpc"): for i in results ) - -def test_list_runtime_project_attachments_pages(transport_name: str = "grpc"): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="abc", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[], - next_page_token="def", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="ghi", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - ), - RuntimeError, + pages = list( + client.list_runtime_project_attachments(request=sample_request).pages ) - pages = list(client.list_runtime_project_attachments(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_async_pager(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="abc", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[], - next_page_token="def", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="ghi", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_runtime_project_attachments( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all( - isinstance(i, runtime_project_attachment_service.RuntimeProjectAttachment) - for i in responses - ) - - -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_async_pages(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="abc", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[], - next_page_token="def", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="ghi", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_runtime_project_attachments(request={}) - ).pages: - pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2722,99 +2246,43 @@ async def test_list_runtime_project_attachments_async_pages(): dict, ], ) -def test_delete_runtime_project_attachment(request_type, transport: str = "grpc"): +def test_delete_runtime_project_attachment_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_runtime_project_attachment(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - ) - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_runtime_project_attachment(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_runtime_project_attachment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - ) - - -def test_delete_runtime_project_attachment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_runtime_project_attachment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest( - name="name_value", - ) - - -def test_delete_runtime_project_attachment_use_cached_wrapped_rpc(): +def test_delete_runtime_project_attachment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2835,6 +2303,7 @@ def test_delete_runtime_project_attachment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.delete_runtime_project_attachment ] = mock_rpc + request = {} client.delete_runtime_project_attachment(request) @@ -2848,258 +2317,232 @@ def test_delete_runtime_project_attachment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - ) - - -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_runtime_project_attachment - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_runtime_project_attachment - ] = mock_rpc - - request = {} - await client.delete_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_runtime_project_attachment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_async( - transport: str = "grpc_asyncio", +def test_delete_runtime_project_attachment_rest_required_fields( request_type=runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, ): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_runtime_project_attachment(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - ) - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. - assert response is None + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_async_from_dict(): - await test_delete_runtime_project_attachment_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_delete_runtime_project_attachment_field_headers(): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - request.name = "name_value" + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - call.return_value = None - client.delete_runtime_project_attachment(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_runtime_project_attachment(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_runtime_project_attachment_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = ( + transport.delete_runtime_project_attachment._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_runtime_project_attachment_flattened(): - client = RuntimeProjectAttachmentServiceClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_runtime_project_attachment_rest_interceptors(null_interceptor): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), ) - - # Mock the actual call within the gRPC stub, and fake the request. + client = RuntimeProjectAttachmentServiceClient(transport=transport) with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_runtime_project_attachment( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_delete_runtime_project_attachment_flattened_error(): + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_delete_runtime_project_attachment", + ) as pre: + pre.assert_not_called() + pb_message = runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest.pb( + runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = ( + runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_runtime_project_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_runtime_project_attachment_rest_bad_request( + transport: str = "rest", + request_type=runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_runtime_project_attachment( - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_runtime_project_attachment(request) -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_flattened_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_delete_runtime_project_attachment_rest_flattened(): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_runtime_project_attachment( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_runtime_project_attachment(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/runtimeProjectAttachments/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_flattened_error_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_delete_runtime_project_attachment_rest_flattened_error( + transport: str = "rest", +): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_runtime_project_attachment( + client.delete_runtime_project_attachment( runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest(), name="name_value", ) +def test_delete_runtime_project_attachment_rest_error(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3107,33 +2550,35 @@ async def test_delete_runtime_project_attachment_flattened_error_async(): dict, ], ) -def test_lookup_runtime_project_attachment(request_type, transport: str = "grpc"): +def test_lookup_runtime_project_attachment_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() ) - response = client.lookup_runtime_project_attachment(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( + return_value ) - assert args[0] == request + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.lookup_runtime_project_attachment(request) # Establish that the response is the type that we expect. assert isinstance( @@ -3142,69 +2587,13 @@ def test_lookup_runtime_project_attachment(request_type, transport: str = "grpc" ) -def test_lookup_runtime_project_attachment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.lookup_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() - ) - - -def test_lookup_runtime_project_attachment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.lookup_runtime_project_attachment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest( - name="name_value", - ) - - -def test_lookup_runtime_project_attachment_use_cached_wrapped_rpc(): +def test_lookup_runtime_project_attachment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -3225,6 +2614,7 @@ def test_lookup_runtime_project_attachment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.lookup_runtime_project_attachment ] = mock_rpc + request = {} client.lookup_runtime_project_attachment(request) @@ -3238,216 +2628,235 @@ def test_lookup_runtime_project_attachment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_empty_call_async(): - # This test is a coverage failsafe to 
make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - response = await client.lookup_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() - ) - - -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.lookup_runtime_project_attachment - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.lookup_runtime_project_attachment - ] = mock_rpc - - request = {} - await client.lookup_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.lookup_runtime_project_attachment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_async( - transport: str = "grpc_asyncio", +def test_lookup_runtime_project_attachment_rest_required_fields( request_type=runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, ): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - response = await client.lookup_runtime_project_attachment(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() - ) - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. - assert isinstance( - response, - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse, - ) + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_async_from_dict(): - await test_lookup_runtime_project_attachment_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_lookup_runtime_project_attachment_field_headers(): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - call.return_value = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - client.lookup_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # Designate an appropriate value for the returned response. + return_value = ( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + response = client.lookup_runtime_project_attachment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_lookup_runtime_project_attachment_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() + unset_fields = ( + transport.lookup_runtime_project_attachment._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) - request.name = "name_value" - # Mock the actual call within the gRPC stub, and fake the request. 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_lookup_runtime_project_attachment_rest_interceptors(null_interceptor): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), + ) + client = RuntimeProjectAttachmentServiceClient(transport=transport) with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_lookup_runtime_project_attachment", + ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_lookup_runtime_project_attachment", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest.pb( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.to_json( runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() ) - await client.lookup_runtime_project_attachment(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = ( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + client.lookup_runtime_project_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() -def test_lookup_runtime_project_attachment_flattened(): +def test_lookup_runtime_project_attachment_rest_bad_request( + transport: str = "rest", + request_type=runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.lookup_runtime_project_attachment(request) + + +def test_lookup_runtime_project_attachment_rest_flattened(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.lookup_runtime_project_attachment( + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.lookup_runtime_project_attachment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*}:lookupRuntimeProjectAttachment" + % client.transport._host, + args[1], + ) -def test_lookup_runtime_project_attachment_flattened_error(): +def test_lookup_runtime_project_attachment_rest_flattened_error( + transport: str = "rest", +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -3459,3853 +2868,802 @@ def test_lookup_runtime_project_attachment_flattened_error(): ) -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_flattened_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_lookup_runtime_project_attachment_rest_error(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.lookup_runtime_project_attachment( - name="name_value", + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RuntimeProjectAttachmentServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + # It is an error to provide an api_key and a transport instance. + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RuntimeProjectAttachmentServiceClient( + client_options=options, + transport=transport, + ) + # It is an error to provide an api_key and a credential. 
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RuntimeProjectAttachmentServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_flattened_error_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( + # It is an error to provide scopes and a transport instance. + transport = transports.RuntimeProjectAttachmentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. with pytest.raises(ValueError): - await client.lookup_runtime_project_attachment( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest(), - name="name_value", + client = RuntimeProjectAttachmentServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, ) +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RuntimeProjectAttachmentServiceClient(transport=transport) + assert client.transport is transport + + @pytest.mark.parametrize( - "request_type", + "transport_class", [ - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, - dict, + transports.RuntimeProjectAttachmentServiceRestTransport, ], ) -def test_create_runtime_project_attachment_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["runtime_project_attachment"] = { - "name": "name_value", - "runtime_project": "runtime_project_value", - "create_time": {"seconds": 751, "nanos": 543}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest.meta.fields[ - "runtime_project_attachment" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "runtime_project_attachment" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["runtime_project_attachment"][field]) - ): - del 
request_init["runtime_project_attachment"][field][i][subfield] - else: - del request_init["runtime_project_attachment"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value", - runtime_project="runtime_project_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_runtime_project_attachment(request) - - # Establish that the response is the type that we expect. 
- assert isinstance( - response, runtime_project_attachment_service.RuntimeProjectAttachment - ) - assert response.name == "name_value" - assert response.runtime_project == "runtime_project_value" - - -def test_create_runtime_project_attachment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.create_runtime_project_attachment - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_runtime_project_attachment - ] = mock_rpc - - request = {} - client.create_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_runtime_project_attachment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_runtime_project_attachment_rest_required_fields( - request_type=runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, -): - transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["runtime_project_attachment_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - assert "runtimeProjectAttachmentId" not in jsonified_request - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "runtimeProjectAttachmentId" in jsonified_request - assert ( - jsonified_request["runtimeProjectAttachmentId"] - == request_init["runtime_project_attachment_id"] - ) - - jsonified_request["parent"] = "parent_value" - jsonified_request[ - "runtimeProjectAttachmentId" - ] = "runtime_project_attachment_id_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_runtime_project_attachment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("runtime_project_attachment_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "runtimeProjectAttachmentId" in jsonified_request - assert ( - jsonified_request["runtimeProjectAttachmentId"] - == "runtime_project_attachment_id_value" - ) - - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = runtime_project_attachment_service.RuntimeProjectAttachment() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_runtime_project_attachment(request) - - expected_params = [ - ( - "runtimeProjectAttachmentId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_runtime_project_attachment_rest_unset_required_fields(): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = ( - transport.create_runtime_project_attachment._get_unset_required_fields({}) - ) - assert set(unset_fields) == ( - set(("runtimeProjectAttachmentId",)) - & set( - ( - "parent", - "runtimeProjectAttachmentId", - "runtimeProjectAttachment", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_runtime_project_attachment_rest_interceptors(null_interceptor): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.RuntimeProjectAttachmentServiceRestInterceptor(), - ) - client = RuntimeProjectAttachmentServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - 
transports.RuntimeProjectAttachmentServiceRestInterceptor, - "post_create_runtime_project_attachment", - ) as post, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "pre_create_runtime_project_attachment", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest.pb( - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - runtime_project_attachment_service.RuntimeProjectAttachment.to_json( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - ) - - request = ( - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - - client.create_runtime_project_attachment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_runtime_project_attachment_rest_bad_request( - transport: str = "rest", - request_type=runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_runtime_project_attachment(request) - - -def test_create_runtime_project_attachment_rest_flattened(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = runtime_project_attachment_service.RuntimeProjectAttachment() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value" - ), - runtime_project_attachment_id="runtime_project_attachment_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_runtime_project_attachment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/runtimeProjectAttachments" - % client.transport._host, - args[1], - ) - - -def test_create_runtime_project_attachment_rest_flattened_error( - transport: str = "rest", -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_runtime_project_attachment( - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest(), - parent="parent_value", - runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value" - ), - runtime_project_attachment_id="runtime_project_attachment_id_value", - ) - - -def test_create_runtime_project_attachment_rest_error(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, - dict, - ], -) -def test_get_runtime_project_attachment_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value", - runtime_project="runtime_project_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_runtime_project_attachment(request) - - # Establish that the response is the type that we expect. - assert isinstance( - response, runtime_project_attachment_service.RuntimeProjectAttachment - ) - assert response.name == "name_value" - assert response.runtime_project == "runtime_project_value" - - -def test_get_runtime_project_attachment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.get_runtime_project_attachment - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_runtime_project_attachment - ] = mock_rpc - - request = {} - client.get_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_runtime_project_attachment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_runtime_project_attachment_rest_required_fields( - request_type=runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, -): - transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = runtime_project_attachment_service.RuntimeProjectAttachment() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_runtime_project_attachment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_runtime_project_attachment_rest_unset_required_fields(): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_runtime_project_attachment._get_unset_required_fields( - {} - ) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_runtime_project_attachment_rest_interceptors(null_interceptor): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.RuntimeProjectAttachmentServiceRestInterceptor(), - ) - client = RuntimeProjectAttachmentServiceClient(transport=transport) - 
with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "post_get_runtime_project_attachment", - ) as post, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "pre_get_runtime_project_attachment", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = ( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest.pb( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - ) - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - runtime_project_attachment_service.RuntimeProjectAttachment.to_json( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - ) - - request = ( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - - client.get_runtime_project_attachment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_runtime_project_attachment_rest_bad_request( - transport: str = "rest", - request_type=runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" - } - request 
= request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_runtime_project_attachment(request) - - -def test_get_runtime_project_attachment_rest_flattened(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = runtime_project_attachment_service.RuntimeProjectAttachment() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_runtime_project_attachment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/runtimeProjectAttachments/*}" - % client.transport._host, - args[1], - ) - - -def test_get_runtime_project_attachment_rest_flattened_error(transport: str = "rest"): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_runtime_project_attachment( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest(), - name="name_value", - ) - - -def test_get_runtime_project_attachment_rest_error(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, - dict, - ], -) -def test_list_runtime_project_attachments_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - next_page_token="next_page_token_value", - ) - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_runtime_project_attachments(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListRuntimeProjectAttachmentsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_runtime_project_attachments_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.list_runtime_project_attachments - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_runtime_project_attachments - ] = mock_rpc - - request = {} - client.list_runtime_project_attachments(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_runtime_project_attachments(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_runtime_project_attachments_rest_required_fields( - request_type=runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, -): - transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_runtime_project_attachments._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_runtime_project_attachments._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() - ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_runtime_project_attachments(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_runtime_project_attachments_rest_unset_required_fields(): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = ( - transport.list_runtime_project_attachments._get_unset_required_fields({}) - ) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_runtime_project_attachments_rest_interceptors(null_interceptor): - transport = 
transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.RuntimeProjectAttachmentServiceRestInterceptor(), - ) - client = RuntimeProjectAttachmentServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "post_list_runtime_project_attachments", - ) as post, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "pre_list_runtime_project_attachments", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest.pb( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.to_json( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() - ) - - request = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() - ) - - client.list_runtime_project_attachments( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_runtime_project_attachments_rest_bad_request( - transport: str = "rest", - 
request_type=runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_runtime_project_attachments(request) - - -def test_list_runtime_project_attachments_rest_flattened(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() - ) - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_runtime_project_attachments(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/runtimeProjectAttachments" - % client.transport._host, - args[1], - ) - - -def test_list_runtime_project_attachments_rest_flattened_error(transport: str = "rest"): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_runtime_project_attachments( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest(), - parent="parent_value", - ) - - -def test_list_runtime_project_attachments_rest_pager(transport: str = "rest"): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="abc", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[], - next_page_token="def", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="ghi", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.to_json( - x - ) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_runtime_project_attachments(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, runtime_project_attachment_service.RuntimeProjectAttachment) - for i in results - ) - - pages = 
list( - client.list_runtime_project_attachments(request=sample_request).pages - ) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, - dict, - ], -) -def test_delete_runtime_project_attachment_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_runtime_project_attachment(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_runtime_project_attachment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.delete_runtime_project_attachment - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_runtime_project_attachment - ] = mock_rpc - - request = {} - client.delete_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_runtime_project_attachment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_runtime_project_attachment_rest_required_fields( - request_type=runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, -): - transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_runtime_project_attachment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_runtime_project_attachment_rest_unset_required_fields(): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = ( - transport.delete_runtime_project_attachment._get_unset_required_fields({}) - ) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_runtime_project_attachment_rest_interceptors(null_interceptor): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.RuntimeProjectAttachmentServiceRestInterceptor(), - ) - client = RuntimeProjectAttachmentServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - 
transports.RuntimeProjectAttachmentServiceRestInterceptor, - "pre_delete_runtime_project_attachment", - ) as pre: - pre.assert_not_called() - pb_message = runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest.pb( - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = ( - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_runtime_project_attachment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_runtime_project_attachment_rest_bad_request( - transport: str = "rest", - request_type=runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_runtime_project_attachment(request) - - -def test_delete_runtime_project_attachment_rest_flattened(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_runtime_project_attachment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/runtimeProjectAttachments/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_runtime_project_attachment_rest_flattened_error( - transport: str = "rest", -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_runtime_project_attachment( - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest(), - name="name_value", - ) - - -def test_delete_runtime_project_attachment_rest_error(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, - dict, - ], -) -def test_lookup_runtime_project_attachment_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.lookup_runtime_project_attachment(request) - - # Establish that the response is the type that we expect. 
- assert isinstance( - response, - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse, - ) - - -def test_lookup_runtime_project_attachment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.lookup_runtime_project_attachment - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.lookup_runtime_project_attachment - ] = mock_rpc - - request = {} - client.lookup_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.lookup_runtime_project_attachment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_lookup_runtime_project_attachment_rest_required_fields( - request_type=runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, -): - transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).lookup_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).lookup_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.lookup_runtime_project_attachment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_lookup_runtime_project_attachment_rest_unset_required_fields(): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = ( - transport.lookup_runtime_project_attachment._get_unset_required_fields({}) - ) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_lookup_runtime_project_attachment_rest_interceptors(null_interceptor): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.RuntimeProjectAttachmentServiceRestInterceptor(), - ) - client = 
RuntimeProjectAttachmentServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "post_lookup_runtime_project_attachment", - ) as post, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "pre_lookup_runtime_project_attachment", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest.pb( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.to_json( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - - request = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - - client.lookup_runtime_project_attachment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_lookup_runtime_project_attachment_rest_bad_request( - transport: str = "rest", - request_type=runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - 
request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.lookup_runtime_project_attachment(request) - - -def test_lookup_runtime_project_attachment_rest_flattened(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.lookup_runtime_project_attachment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*}:lookupRuntimeProjectAttachment" - % client.transport._host, - args[1], - ) - - -def test_lookup_runtime_project_attachment_rest_flattened_error( - transport: str = "rest", -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.lookup_runtime_project_attachment( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest(), - name="name_value", - ) - - -def test_lookup_runtime_project_attachment_rest_error(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = RuntimeProjectAttachmentServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. 
- transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = RuntimeProjectAttachmentServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = RuntimeProjectAttachmentServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = RuntimeProjectAttachmentServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = RuntimeProjectAttachmentServiceClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - transports.RuntimeProjectAttachmentServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = RuntimeProjectAttachmentServiceClient.get_transport_class( - transport_name - )( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - ) - - -def test_runtime_project_attachment_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.RuntimeProjectAttachmentServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_runtime_project_attachment_service_base_transport(): - # Instantiate the base transport. 
- with mock.patch( - "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.RuntimeProjectAttachmentServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "create_runtime_project_attachment", - "get_runtime_project_attachment", - "list_runtime_project_attachments", - "delete_runtime_project_attachment", - "lookup_runtime_project_attachment", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_runtime_project_attachment_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RuntimeProjectAttachmentServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def 
test_runtime_project_attachment_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RuntimeProjectAttachmentServiceTransport() - adc.assert_called_once() - - -def test_runtime_project_attachment_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RuntimeProjectAttachmentServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - ], -) -def test_runtime_project_attachment_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - transports.RuntimeProjectAttachmentServiceRestTransport, - ], -) -def test_runtime_project_attachment_service_transport_auth_gdch_credentials( - transport_class, -): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.RuntimeProjectAttachmentServiceGrpcTransport, grpc_helpers), - ( - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - grpc_helpers_async, - ), - ], -) -def test_runtime_project_attachment_service_transport_create_channel( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - ], -) -def test_runtime_project_attachment_service_grpc_transport_client_cert_source_for_mtls( - transport_class, -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_runtime_project_attachment_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_runtime_project_attachment_service_host_no_port(transport_name): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_runtime_project_attachment_service_host_with_port(transport_name): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", 
"grpc_asyncio"] - else "https://apihub.googleapis.com:8000" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_runtime_project_attachment_service_client_transport_session_collision( - transport_name, -): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = RuntimeProjectAttachmentServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = RuntimeProjectAttachmentServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_runtime_project_attachment._session - session2 = client2.transport.create_runtime_project_attachment._session - assert session1 != session2 - session1 = client1.transport.get_runtime_project_attachment._session - session2 = client2.transport.get_runtime_project_attachment._session - assert session1 != session2 - session1 = client1.transport.list_runtime_project_attachments._session - session2 = client2.transport.list_runtime_project_attachments._session - assert session1 != session2 - session1 = client1.transport.delete_runtime_project_attachment._session - session2 = client2.transport.delete_runtime_project_attachment._session - assert session1 != session2 - session1 = client1.transport.lookup_runtime_project_attachment._session - session2 = client2.transport.lookup_runtime_project_attachment._session - assert session1 != session2 - - -def test_runtime_project_attachment_service_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_runtime_project_attachment_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - ], -) -def test_runtime_project_attachment_service_transport_channel_mtls_with_client_cert_source( - transport_class, -): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - 
certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - ], -) -def test_runtime_project_attachment_service_transport_channel_mtls_with_adc( - transport_class, -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_runtime_project_attachment_path(): - project = "squid" - location = "clam" - 
runtime_project_attachment = "whelk" - expected = "projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}".format( - project=project, - location=location, - runtime_project_attachment=runtime_project_attachment, - ) - actual = RuntimeProjectAttachmentServiceClient.runtime_project_attachment_path( - project, location, runtime_project_attachment - ) - assert expected == actual - - -def test_parse_runtime_project_attachment_path(): - expected = { - "project": "octopus", - "location": "oyster", - "runtime_project_attachment": "nudibranch", - } - path = RuntimeProjectAttachmentServiceClient.runtime_project_attachment_path( - **expected - ) - - # Check that the path construction is reversible. - actual = ( - RuntimeProjectAttachmentServiceClient.parse_runtime_project_attachment_path( - path - ) - ) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = RuntimeProjectAttachmentServiceClient.common_billing_account_path( - billing_account - ) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = RuntimeProjectAttachmentServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = RuntimeProjectAttachmentServiceClient.parse_common_billing_account_path( - path - ) - assert expected == actual - - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = RuntimeProjectAttachmentServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = RuntimeProjectAttachmentServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = RuntimeProjectAttachmentServiceClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = RuntimeProjectAttachmentServiceClient.common_organization_path( - organization - ) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = RuntimeProjectAttachmentServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = RuntimeProjectAttachmentServiceClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format( - project=project, - ) - actual = RuntimeProjectAttachmentServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = RuntimeProjectAttachmentServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = RuntimeProjectAttachmentServiceClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = RuntimeProjectAttachmentServiceClient.common_location_path( - project, location - ) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = RuntimeProjectAttachmentServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = RuntimeProjectAttachmentServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.RuntimeProjectAttachmentServiceTransport, "_prep_wrapped_messages" - ) as prep: - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.RuntimeProjectAttachmentServiceTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = RuntimeProjectAttachmentServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() @pytest.mark.parametrize( - "request_type", + "transport_name", [ - operations_pb2.ListOperationsRequest, - dict, + "rest", ], ) -def test_list_operations_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_delete_operation(transport: str = "grpc"): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_transport_kind(transport_name): + transport = RuntimeProjectAttachmentServiceClient.get_transport_class( + transport_name + )( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + assert transport.kind == transport_name - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_runtime_project_attachment_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RuntimeProjectAttachmentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) - # Establish that the response is the type that we expect. - assert response is None +def test_runtime_project_attachment_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.RuntimeProjectAttachmentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_delete_operation_field_headers(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_runtime_project_attachment", + "get_runtime_project_attachment", + "list_runtime_project_attachments", + "delete_runtime_project_attachment", + "lookup_runtime_project_attachment", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - + with pytest.raises(NotImplementedError): + transport.close() -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_runtime_project_attachment_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RuntimeProjectAttachmentServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_runtime_project_attachment_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RuntimeProjectAttachmentServiceTransport() + adc.assert_called_once() -def test_delete_operation_from_dict(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation( - request={ - "name": "locations", - } +def test_runtime_project_attachment_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RuntimeProjectAttachmentServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } +def test_runtime_project_attachment_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) - call.assert_called() + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -def test_cancel_operation(transport: str = "grpc"): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_runtime_project_attachment_service_host_no_port(transport_name): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_runtime_project_attachment_service_host_with_port(transport_name): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_runtime_project_attachment_service_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RuntimeProjectAttachmentServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RuntimeProjectAttachmentServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_runtime_project_attachment._session + session2 = client2.transport.create_runtime_project_attachment._session + assert session1 != session2 + session1 = client1.transport.get_runtime_project_attachment._session + session2 = client2.transport.get_runtime_project_attachment._session + assert session1 != session2 + session1 = client1.transport.list_runtime_project_attachments._session + session2 = client2.transport.list_runtime_project_attachments._session + assert session1 != session2 + session1 = client1.transport.delete_runtime_project_attachment._session + session2 = client2.transport.delete_runtime_project_attachment._session + assert session1 != session2 + session1 = client1.transport.lookup_runtime_project_attachment._session + session2 = client2.transport.lookup_runtime_project_attachment._session + assert session1 != session2 
-@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_runtime_project_attachment_path(): + project = "squid" + location = "clam" + runtime_project_attachment = "whelk" + expected = "projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}".format( + project=project, + location=location, + runtime_project_attachment=runtime_project_attachment, + ) + actual = RuntimeProjectAttachmentServiceClient.runtime_project_attachment_path( + project, location, runtime_project_attachment ) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_runtime_project_attachment_path(): + expected = { + "project": "octopus", + "location": "oyster", + "runtime_project_attachment": "nudibranch", + } + path = RuntimeProjectAttachmentServiceClient.runtime_project_attachment_path( + **expected + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. 
+ actual = ( + RuntimeProjectAttachmentServiceClient.parse_runtime_project_attachment_path( + path + ) + ) + assert expected == actual -def test_cancel_operation_from_dict(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = RuntimeProjectAttachmentServiceClient.common_billing_account_path( + billing_account ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None + assert expected == actual - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = RuntimeProjectAttachmentServiceClient.common_billing_account_path(**expected) -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + # Check that the path construction is reversible. + actual = RuntimeProjectAttachmentServiceClient.parse_common_billing_account_path( + path ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, ) + actual = RuntimeProjectAttachmentServiceClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = RuntimeProjectAttachmentServiceClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = RuntimeProjectAttachmentServiceClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = RuntimeProjectAttachmentServiceClient.common_organization_path( + organization ) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = RuntimeProjectAttachmentServiceClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = RuntimeProjectAttachmentServiceClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, ) + actual = RuntimeProjectAttachmentServiceClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = RuntimeProjectAttachmentServiceClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. 
+ actual = RuntimeProjectAttachmentServiceClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = RuntimeProjectAttachmentServiceClient.common_location_path( + project, location ) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = RuntimeProjectAttachmentServiceClient.common_location_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = RuntimeProjectAttachmentServiceClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() - response = client.get_operation( - request={ - "name": "locations", - } + with mock.patch.object( + transports.RuntimeProjectAttachmentServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() - + prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } + with mock.patch.object( + transports.RuntimeProjectAttachmentServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = RuntimeProjectAttachmentServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.ListLocationsRequest() - request.name = "locations" + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert response is None -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_list_locations_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_operation(request) -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Establish that the response is the type that we expect. + assert response is None -def test_get_location(transport: str = "grpc"): +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = RuntimeProjectAttachmentServiceAsyncClient( +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = locations_pb2.GetLocationRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + assert isinstance(response, operations_pb2.Operation) -def test_get_location_field_headers(): +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials() + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() - - client.get_location(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials() + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -7323,7 +3681,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = RuntimeProjectAttachmentServiceClient( @@ -7342,11 +3699,7 @@ def test_client_ctx(): [ ( RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, + transports.RuntimeProjectAttachmentServiceRestTransport, ), ], ) From d326f79034540e6028ccd6ac5f329f3269fa12b9 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 4 Sep 2024 10:29:01 -0400 Subject: [PATCH 02/59] chore: release main (#13056) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit :robot: I have created a release *beep* *boop* ---
google-cloud-apihub: 0.2.0 ## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apihub-v0.1.0...google-cloud-apihub-v0.2.0) (2024-09-04) ### ⚠ BREAKING CHANGES * [google-cloud-apihub] remove gRPC support for client libraries ([#13055](https://github.com/googleapis/google-cloud-python/issues/13055)) ### Bug Fixes * [google-cloud-apihub] remove gRPC support for client libraries ([#13055](https://github.com/googleapis/google-cloud-python/issues/13055)) ([3762ff4](https://github.com/googleapis/google-cloud-python/commit/3762ff40e51466bc516939a31732300c8e20211a))
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- packages/google-cloud-apihub/CHANGELOG.md | 11 +++++++++++ .../google/cloud/apihub/gapic_version.py | 2 +- .../google/cloud/apihub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.apihub.v1.json | 2 +- 5 files changed, 15 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index a695d96468bb..4fe1785f9ff1 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -17,7 +17,7 @@ "packages/google-cloud-api-keys": "0.5.11", "packages/google-cloud-apigee-connect": "1.9.5", "packages/google-cloud-apigee-registry": "0.6.11", - "packages/google-cloud-apihub": "0.1.0", + "packages/google-cloud-apihub": "0.2.0", "packages/google-cloud-appengine-admin": "1.11.5", "packages/google-cloud-appengine-logging": "1.4.5", "packages/google-cloud-apphub": "0.1.2", diff --git a/packages/google-cloud-apihub/CHANGELOG.md b/packages/google-cloud-apihub/CHANGELOG.md index ae96d0e193c5..6b8fb34aaf0b 100644 --- a/packages/google-cloud-apihub/CHANGELOG.md +++ b/packages/google-cloud-apihub/CHANGELOG.md @@ -1,5 +1,16 @@ # Changelog +## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apihub-v0.1.0...google-cloud-apihub-v0.2.0) (2024-09-04) + + +### ⚠ BREAKING CHANGES + +* [google-cloud-apihub] remove gRPC support for client libraries ([#13055](https://github.com/googleapis/google-cloud-python/issues/13055)) + +### Bug Fixes + +* [google-cloud-apihub] remove gRPC support for client libraries ([#13055](https://github.com/googleapis/google-cloud-python/issues/13055)) ([3762ff4](https://github.com/googleapis/google-cloud-python/commit/3762ff40e51466bc516939a31732300c8e20211a)) 
+ ## 0.1.0 (2024-08-08) diff --git a/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py b/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py index 558c8aab67c5..364164ddb134 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py +++ b/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py index 558c8aab67c5..364164ddb134 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json b/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json index 475a2011ac20..2dd2f7e3c87d 100644 --- a/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json +++ b/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-apihub", - "version": "0.1.0" + "version": "0.2.0" }, "snippets": [ { From 2bbab3bea1548fdb6200856ffe4a6fe7d6bf8487 Mon Sep 17 00:00:00 2001 From: "owlbot-bootstrapper[bot]" <104649659+owlbot-bootstrapper[bot]@users.noreply.github.com> Date: Thu, 5 Sep 2024 21:32:03 +0000 Subject: [PATCH 03/59] feat: add initial files for google.marketingplatform.admin.v1alpha (#13060) Source-Link: https://github.com/googleapis/googleapis-gen/commit/01202948aeacf502f63d3d01995521589e4c6db4 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcmtldGluZ3BsYXRmb3JtLWFkbWluLy5Pd2xCb3QueWFtbCIsImgiOiIwMTIwMjk0OGFlYWNmNTAyZjYzZDNkMDE5OTU1MjE1ODllNGM2ZGI0In0= PiperOrigin-RevId: 0 --------- Co-authored-by: Owlbot Bootstrapper Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.OwlBot.yaml | 18 + .../.coveragerc | 13 + .../.flake8 | 33 + .../.gitignore | 63 + .../.repo-metadata.json | 17 + .../CHANGELOG.md | 1 + .../CODE_OF_CONDUCT.md | 95 + .../CONTRIBUTING.rst | 271 + .../LICENSE | 202 + .../MANIFEST.in | 25 + .../README.rst | 108 + .../docs/CHANGELOG.md | 1 + .../docs/README.rst | 1 + .../docs/_static/custom.css | 20 + .../docs/_templates/layout.html | 50 + .../docs/conf.py | 384 ++ .../docs/index.rst | 23 + .../marketingplatform_admin_service.rst | 10 + .../services_.rst | 6 + .../types_.rst | 6 + .../docs/multiprocessing.rst | 7 + 
.../ads/marketingplatform_admin/__init__.py | 57 + .../marketingplatform_admin/gapic_version.py | 16 + .../ads/marketingplatform_admin/py.typed | 2 + .../__init__.py | 51 + .../gapic_metadata.json | 103 + .../gapic_version.py | 16 + .../marketingplatform_admin_v1alpha/py.typed | 2 + .../services/__init__.py | 15 + .../__init__.py | 22 + .../async_client.py | 894 +++ .../marketingplatform_admin_service/client.py | 1349 ++++ .../marketingplatform_admin_service/pagers.py | 208 + .../transports/__init__.py | 41 + .../transports/base.py | 232 + .../transports/grpc.py | 412 ++ .../transports/grpc_asyncio.py | 444 ++ .../transports/rest.py | 858 +++ .../types/__init__.py | 40 + .../types/marketingplatform_admin.py | 217 + .../types/resources.py | 120 + .../mypy.ini | 3 + .../noxfile.py | 452 ++ ...ice_create_analytics_account_link_async.py | 56 + ...vice_create_analytics_account_link_sync.py | 56 + ...ice_delete_analytics_account_link_async.py | 50 + ...vice_delete_analytics_account_link_sync.py | 50 + ...rm_admin_service_get_organization_async.py | 52 + ...orm_admin_service_get_organization_sync.py | 52 + ...vice_list_analytics_account_links_async.py | 53 + ...rvice_list_analytics_account_links_sync.py | 53 + ...ervice_set_property_service_level_async.py | 54 + ...service_set_property_service_level_sync.py | 54 + ...oogle.marketingplatform.admin.v1alpha.json | 822 +++ .../scripts/decrypt-secrets.sh | 46 + ...arketingplatform_admin_v1alpha_keywords.py | 180 + .../setup.py | 95 + .../testing/.gitignore | 3 + .../testing/constraints-3.10.txt | 6 + .../testing/constraints-3.11.txt | 6 + .../testing/constraints-3.12.txt | 6 + .../testing/constraints-3.7.txt | 10 + .../testing/constraints-3.8.txt | 6 + .../testing/constraints-3.9.txt | 6 + .../tests/__init__.py | 15 + .../tests/unit/__init__.py | 15 + .../tests/unit/gapic/__init__.py | 15 + .../__init__.py | 15 + .../test_marketingplatform_admin_service.py | 5969 +++++++++++++++++ 69 files changed, 14653 insertions(+) create 
mode 100644 packages/google-ads-marketingplatform-admin/.OwlBot.yaml create mode 100644 packages/google-ads-marketingplatform-admin/.coveragerc create mode 100644 packages/google-ads-marketingplatform-admin/.flake8 create mode 100644 packages/google-ads-marketingplatform-admin/.gitignore create mode 100644 packages/google-ads-marketingplatform-admin/.repo-metadata.json create mode 100644 packages/google-ads-marketingplatform-admin/CHANGELOG.md create mode 100644 packages/google-ads-marketingplatform-admin/CODE_OF_CONDUCT.md create mode 100644 packages/google-ads-marketingplatform-admin/CONTRIBUTING.rst create mode 100644 packages/google-ads-marketingplatform-admin/LICENSE create mode 100644 packages/google-ads-marketingplatform-admin/MANIFEST.in create mode 100644 packages/google-ads-marketingplatform-admin/README.rst create mode 120000 packages/google-ads-marketingplatform-admin/docs/CHANGELOG.md create mode 120000 packages/google-ads-marketingplatform-admin/docs/README.rst create mode 100644 packages/google-ads-marketingplatform-admin/docs/_static/custom.css create mode 100644 packages/google-ads-marketingplatform-admin/docs/_templates/layout.html create mode 100644 packages/google-ads-marketingplatform-admin/docs/conf.py create mode 100644 packages/google-ads-marketingplatform-admin/docs/index.rst create mode 100644 packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/marketingplatform_admin_service.rst create mode 100644 packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/services_.rst create mode 100644 packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/types_.rst create mode 100644 packages/google-ads-marketingplatform-admin/docs/multiprocessing.rst create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py 
create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/py.typed create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_metadata.json create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/py.typed create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/async_client.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/pagers.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/base.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc.py create mode 100644 
packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc_asyncio.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/marketingplatform_admin.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/resources.py create mode 100644 packages/google-ads-marketingplatform-admin/mypy.ini create mode 100644 packages/google-ads-marketingplatform-admin/noxfile.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py create mode 100644 
packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json create mode 100755 packages/google-ads-marketingplatform-admin/scripts/decrypt-secrets.sh create mode 100644 packages/google-ads-marketingplatform-admin/scripts/fixup_marketingplatform_admin_v1alpha_keywords.py create mode 100644 packages/google-ads-marketingplatform-admin/setup.py create mode 100644 packages/google-ads-marketingplatform-admin/testing/.gitignore create mode 100644 packages/google-ads-marketingplatform-admin/testing/constraints-3.10.txt create mode 100644 packages/google-ads-marketingplatform-admin/testing/constraints-3.11.txt create mode 100644 packages/google-ads-marketingplatform-admin/testing/constraints-3.12.txt create mode 100644 packages/google-ads-marketingplatform-admin/testing/constraints-3.7.txt create mode 100644 packages/google-ads-marketingplatform-admin/testing/constraints-3.8.txt create mode 100644 
packages/google-ads-marketingplatform-admin/testing/constraints-3.9.txt create mode 100644 packages/google-ads-marketingplatform-admin/tests/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/tests/unit/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/tests/unit/gapic/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py diff --git a/packages/google-ads-marketingplatform-admin/.OwlBot.yaml b/packages/google-ads-marketingplatform-admin/.OwlBot.yaml new file mode 100644 index 000000000000..d397bf3a63bd --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +deep-copy-regex: + - source: /google/marketingplatform/admin/(v.*)/.*-py + dest: /owl-bot-staging/google-ads-marketingplatform-admin/$1 +api-name: google-ads-marketingplatform-admin diff --git a/packages/google-ads-marketingplatform-admin/.coveragerc b/packages/google-ads-marketingplatform-admin/.coveragerc new file mode 100644 index 000000000000..f2b0df425e8e --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/ads/marketingplatform_admin/__init__.py + google/ads/marketingplatform_admin/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-ads-marketingplatform-admin/.flake8 b/packages/google-ads-marketingplatform-admin/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-ads-marketingplatform-admin/.gitignore b/packages/google-ads-marketingplatform-admin/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-ads-marketingplatform-admin/.repo-metadata.json b/packages/google-ads-marketingplatform-admin/.repo-metadata.json new file mode 100644 index 000000000000..094cd0a04207 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-ads-marketingplatform-admin", + "name_pretty": "Google Marketing Platform Admin API", + "api_description": "The Google Marketing Platform Admin API allows for programmatic access to the Google Marketing Platform configuration data. 
You can use the Google Marketing Platform Admin API to manage links between your Google Marketing Platform organization and Google Analytics accounts, and to set the service level of your GA4 properties.", + "product_documentation": "https://developers.google.com/analytics/devguides/config/gmp/v1", + "client_documentation": "https://googleapis.dev/python/google-ads-marketingplatform-admin/latest", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-ads-marketingplatform-admin", + "api_id": "marketingplatformadmin.googleapis.com", + "default_version": "v1alpha", + "codeowner_team": "", + "api_shortname": "marketingplatformadmin" +} diff --git a/packages/google-ads-marketingplatform-admin/CHANGELOG.md b/packages/google-ads-marketingplatform-admin/CHANGELOG.md new file mode 100644 index 000000000000..5ddad421e08f --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/CHANGELOG.md @@ -0,0 +1 @@ +# Changelog \ No newline at end of file diff --git a/packages/google-ads-marketingplatform-admin/CODE_OF_CONDUCT.md b/packages/google-ads-marketingplatform-admin/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. 
In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-ads-marketingplatform-admin/CONTRIBUTING.rst b/packages/google-ads-marketingplatform-admin/CONTRIBUTING.rst new file mode 100644 index 000000000000..a7223a5e0b19 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. 
+ +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k <name of test> + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors.
Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit <https://pre-commit.com/>`__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k <name of test> + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__.
Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k <name of test> + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g.
``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-ads-marketingplatform-admin + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-ads-marketingplatform-admin/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. 
diff --git a/packages/google-ads-marketingplatform-admin/LICENSE b/packages/google-ads-marketingplatform-admin/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-ads-marketingplatform-admin/MANIFEST.in b/packages/google-ads-marketingplatform-admin/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-ads-marketingplatform-admin/README.rst b/packages/google-ads-marketingplatform-admin/README.rst new file mode 100644 index 000000000000..0c99d23ee7c1 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/README.rst @@ -0,0 +1,108 @@ +Python Client for Google Marketing Platform Admin API +===================================================== + +|preview| |pypi| |versions| + +`Google Marketing Platform Admin API`_: The Google Marketing Platform Admin API allows for programmatic access to the Google Marketing Platform configuration data. You can use the Google Marketing Platform Admin API to manage links between your Google Marketing Platform organization and Google Analytics accounts, and to set the service level of your GA4 properties. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-ads-marketingplatform-admin.svg + :target: https://pypi.org/project/google-ads-marketingplatform-admin/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-ads-marketingplatform-admin.svg + :target: https://pypi.org/project/google-ads-marketingplatform-admin/ +.. _Google Marketing Platform Admin API: https://developers.google.com/analytics/devguides/config/gmp/v1 +.. _Client Library Documentation: https://googleapis.dev/python/google-ads-marketingplatform-admin/latest +.. 
_Product Documentation: https://developers.google.com/analytics/devguides/config/gmp/v1 + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Google Marketing Platform Admin API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Google Marketing Platform Admin API.: https://developers.google.com/analytics/devguides/config/gmp/v1 +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-marketingplatform-admin/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. 
_maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    pip install google-ads-marketingplatform-admin
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    py -m venv <your-env>
+    .\<your-env>\Scripts\activate
+    pip install google-ads-marketingplatform-admin
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Google Marketing Platform Admin API
+  to see other available methods on the client.
+- Read the `Google Marketing Platform Admin API Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Google Marketing Platform Admin API Product documentation: https://developers.google.com/analytics/devguides/config/gmp/v1
+.. 
_README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-ads-marketingplatform-admin/docs/CHANGELOG.md b/packages/google-ads-marketingplatform-admin/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-ads-marketingplatform-admin/docs/README.rst b/packages/google-ads-marketingplatform-admin/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-ads-marketingplatform-admin/docs/_static/custom.css b/packages/google-ads-marketingplatform-admin/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-ads-marketingplatform-admin/docs/_templates/layout.html b/packages/google-ads-marketingplatform-admin/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-ads-marketingplatform-admin/docs/conf.py b/packages/google-ads-marketingplatform-admin/docs/conf.py new file mode 100644 index 000000000000..5c68a3e1a72f --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-ads-marketingplatform-admin documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. 
+needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-ads-marketingplatform-admin" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. 
+# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+html_theme_options = { + "description": "Google Cloud Client Libraries for google-ads-marketingplatform-admin", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. 
+# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-ads-marketingplatform-admin-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. 
+ # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-ads-marketingplatform-admin.tex", + "google-ads-marketingplatform-admin Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-ads-marketingplatform-admin", + "google-ads-marketingplatform-admin Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-ads-marketingplatform-admin", + "google-ads-marketingplatform-admin Documentation", + author, + "google-ads-marketingplatform-admin", + "google-ads-marketingplatform-admin Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-ads-marketingplatform-admin/docs/index.rst b/packages/google-ads-marketingplatform-admin/docs/index.rst new file mode 100644 index 000000000000..b217f5c4b15d --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. 
include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + marketingplatform_admin_v1alpha/services_ + marketingplatform_admin_v1alpha/types_ + + +Changelog +--------- + +For a list of all ``google-ads-marketingplatform-admin`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/marketingplatform_admin_service.rst b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/marketingplatform_admin_service.rst new file mode 100644 index 000000000000..938e180ef989 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/marketingplatform_admin_service.rst @@ -0,0 +1,10 @@ +MarketingplatformAdminService +----------------------------------------------- + +.. automodule:: google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service + :members: + :inherited-members: + +.. automodule:: google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/services_.rst b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/services_.rst new file mode 100644 index 000000000000..427be3b19a50 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/services_.rst @@ -0,0 +1,6 @@ +Services for Google Ads Marketingplatform Admin v1alpha API +=========================================================== +.. 
toctree:: + :maxdepth: 2 + + marketingplatform_admin_service diff --git a/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/types_.rst b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/types_.rst new file mode 100644 index 000000000000..829ca4ea07f4 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/types_.rst @@ -0,0 +1,6 @@ +Types for Google Ads Marketingplatform Admin v1alpha API +======================================================== + +.. automodule:: google.ads.marketingplatform_admin_v1alpha.types + :members: + :show-inheritance: diff --git a/packages/google-ads-marketingplatform-admin/docs/multiprocessing.rst b/packages/google-ads-marketingplatform-admin/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/__init__.py new file mode 100644 index 000000000000..56669ac018e6 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/__init__.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.ads.marketingplatform_admin import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.async_client import ( + MarketingplatformAdminServiceAsyncClient, +) +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.client import ( + MarketingplatformAdminServiceClient, +) +from google.ads.marketingplatform_admin_v1alpha.types.marketingplatform_admin import ( + AnalyticsServiceLevel, + CreateAnalyticsAccountLinkRequest, + DeleteAnalyticsAccountLinkRequest, + GetOrganizationRequest, + ListAnalyticsAccountLinksRequest, + ListAnalyticsAccountLinksResponse, + SetPropertyServiceLevelRequest, + SetPropertyServiceLevelResponse, +) +from google.ads.marketingplatform_admin_v1alpha.types.resources import ( + AnalyticsAccountLink, + LinkVerificationState, + Organization, +) + +__all__ = ( + "MarketingplatformAdminServiceClient", + "MarketingplatformAdminServiceAsyncClient", + "CreateAnalyticsAccountLinkRequest", + "DeleteAnalyticsAccountLinkRequest", + "GetOrganizationRequest", + "ListAnalyticsAccountLinksRequest", + "ListAnalyticsAccountLinksResponse", + "SetPropertyServiceLevelRequest", + "SetPropertyServiceLevelResponse", + "AnalyticsServiceLevel", + "AnalyticsAccountLink", + "Organization", + "LinkVerificationState", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py 
b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/py.typed b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/py.typed new file mode 100644 index 000000000000..4f4b168c56da --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-ads-marketingplatform-admin package uses inline types. 
diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/__init__.py new file mode 100644 index 000000000000..c04bf34623a8 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/__init__.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.ads.marketingplatform_admin_v1alpha import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.marketingplatform_admin_service import ( + MarketingplatformAdminServiceAsyncClient, + MarketingplatformAdminServiceClient, +) +from .types.marketingplatform_admin import ( + AnalyticsServiceLevel, + CreateAnalyticsAccountLinkRequest, + DeleteAnalyticsAccountLinkRequest, + GetOrganizationRequest, + ListAnalyticsAccountLinksRequest, + ListAnalyticsAccountLinksResponse, + SetPropertyServiceLevelRequest, + SetPropertyServiceLevelResponse, +) +from .types.resources import AnalyticsAccountLink, LinkVerificationState, Organization + +__all__ = ( + "MarketingplatformAdminServiceAsyncClient", + "AnalyticsAccountLink", + "AnalyticsServiceLevel", + "CreateAnalyticsAccountLinkRequest", + "DeleteAnalyticsAccountLinkRequest", + "GetOrganizationRequest", + "LinkVerificationState", + "ListAnalyticsAccountLinksRequest", + "ListAnalyticsAccountLinksResponse", + "MarketingplatformAdminServiceClient", + "Organization", + "SetPropertyServiceLevelRequest", + "SetPropertyServiceLevelResponse", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_metadata.json b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_metadata.json new file mode 100644 index 000000000000..8d346e91ed67 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_metadata.json @@ -0,0 +1,103 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.ads.marketingplatform_admin_v1alpha", + "protoPackage": "google.marketingplatform.admin.v1alpha", + "schema": "1.0", + "services": { + "MarketingplatformAdminService": { + "clients": { + "grpc": { + "libraryClient": "MarketingplatformAdminServiceClient", + "rpcs": { + 
"CreateAnalyticsAccountLink": { + "methods": [ + "create_analytics_account_link" + ] + }, + "DeleteAnalyticsAccountLink": { + "methods": [ + "delete_analytics_account_link" + ] + }, + "GetOrganization": { + "methods": [ + "get_organization" + ] + }, + "ListAnalyticsAccountLinks": { + "methods": [ + "list_analytics_account_links" + ] + }, + "SetPropertyServiceLevel": { + "methods": [ + "set_property_service_level" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MarketingplatformAdminServiceAsyncClient", + "rpcs": { + "CreateAnalyticsAccountLink": { + "methods": [ + "create_analytics_account_link" + ] + }, + "DeleteAnalyticsAccountLink": { + "methods": [ + "delete_analytics_account_link" + ] + }, + "GetOrganization": { + "methods": [ + "get_organization" + ] + }, + "ListAnalyticsAccountLinks": { + "methods": [ + "list_analytics_account_links" + ] + }, + "SetPropertyServiceLevel": { + "methods": [ + "set_property_service_level" + ] + } + } + }, + "rest": { + "libraryClient": "MarketingplatformAdminServiceClient", + "rpcs": { + "CreateAnalyticsAccountLink": { + "methods": [ + "create_analytics_account_link" + ] + }, + "DeleteAnalyticsAccountLink": { + "methods": [ + "delete_analytics_account_link" + ] + }, + "GetOrganization": { + "methods": [ + "get_organization" + ] + }, + "ListAnalyticsAccountLinks": { + "methods": [ + "list_analytics_account_links" + ] + }, + "SetPropertyServiceLevel": { + "methods": [ + "set_property_service_level" + ] + } + } + } + } + } + } +} diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/py.typed b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/py.typed new file mode 100644 index 000000000000..4f4b168c56da --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-ads-marketingplatform-admin package uses inline types. diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/__init__.py new file mode 100644 index 000000000000..e634b30fd6a2 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import MarketingplatformAdminServiceAsyncClient +from .client import MarketingplatformAdminServiceClient + +__all__ = ( + "MarketingplatformAdminServiceClient", + "MarketingplatformAdminServiceAsyncClient", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/async_client.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/async_client.py new file mode 100644 index 000000000000..cc9647487d98 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/async_client.py @@ -0,0 +1,894 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ads.marketingplatform_admin_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service import ( + pagers, +) +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +from .client import MarketingplatformAdminServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, MarketingplatformAdminServiceTransport +from .transports.grpc_asyncio import MarketingplatformAdminServiceGrpcAsyncIOTransport + + +class MarketingplatformAdminServiceAsyncClient: + """Service Interface for the Google Marketing Platform Admin + API. + """ + + _client: MarketingplatformAdminServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = MarketingplatformAdminServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + + account_path = staticmethod(MarketingplatformAdminServiceClient.account_path) + parse_account_path = staticmethod( + MarketingplatformAdminServiceClient.parse_account_path + ) + analytics_account_link_path = staticmethod( + MarketingplatformAdminServiceClient.analytics_account_link_path + ) + parse_analytics_account_link_path = staticmethod( + MarketingplatformAdminServiceClient.parse_analytics_account_link_path + ) + organization_path = staticmethod( + MarketingplatformAdminServiceClient.organization_path + ) + parse_organization_path = staticmethod( + MarketingplatformAdminServiceClient.parse_organization_path + ) + property_path = staticmethod(MarketingplatformAdminServiceClient.property_path) + parse_property_path = staticmethod( + MarketingplatformAdminServiceClient.parse_property_path + ) + common_billing_account_path = staticmethod( + MarketingplatformAdminServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + MarketingplatformAdminServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + MarketingplatformAdminServiceClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + MarketingplatformAdminServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + MarketingplatformAdminServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + MarketingplatformAdminServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + MarketingplatformAdminServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + 
MarketingplatformAdminServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + MarketingplatformAdminServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + MarketingplatformAdminServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MarketingplatformAdminServiceAsyncClient: The constructed client. + """ + return MarketingplatformAdminServiceClient.from_service_account_info.__func__(MarketingplatformAdminServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MarketingplatformAdminServiceAsyncClient: The constructed client. + """ + return MarketingplatformAdminServiceClient.from_service_account_file.__func__(MarketingplatformAdminServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return MarketingplatformAdminServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> MarketingplatformAdminServiceTransport: + """Returns the transport used by the client instance. + + Returns: + MarketingplatformAdminServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance.
+ """ + return self._client._universe_domain + + get_transport_class = MarketingplatformAdminServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + MarketingplatformAdminServiceTransport, + Callable[..., MarketingplatformAdminServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the marketingplatform admin service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,MarketingplatformAdminServiceTransport,Callable[..., MarketingplatformAdminServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MarketingplatformAdminServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which can have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = MarketingplatformAdminServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_organization( + self, + request: Optional[ + Union[marketingplatform_admin.GetOrganizationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Organization: + r"""Lookup for a single organization.
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_get_organization(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.GetOrganizationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_organization(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.GetOrganizationRequest, dict]]): + The request object. Request message for GetOrganization + RPC. + name (:class:`str`): + Required. The name of the Organization to retrieve. + Format: organizations/{org_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.Organization: + A resource message representing a + Google Marketing Platform organization. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, marketingplatform_admin.GetOrganizationRequest): + request = marketingplatform_admin.GetOrganizationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_organization + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_analytics_account_links( + self, + request: Optional[ + Union[marketingplatform_admin.ListAnalyticsAccountLinksRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAnalyticsAccountLinksAsyncPager: + r"""Lists the Google Analytics accounts link to the + specified Google Marketing Platform organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_list_analytics_account_links(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ListAnalyticsAccountLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_analytics_account_links(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest, dict]]): + The request object. Request message for + ListAnalyticsAccountLinks RPC. + parent (:class:`str`): + Required. The parent organization, which owns this + collection of Analytics account links. Format: + organizations/{org_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListAnalyticsAccountLinksAsyncPager: + Response message for + ListAnalyticsAccountLinks RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.ListAnalyticsAccountLinksRequest + ): + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_analytics_account_links + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAnalyticsAccountLinksAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_analytics_account_link( + self, + request: Optional[ + Union[marketingplatform_admin.CreateAnalyticsAccountLinkRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + analytics_account_link: Optional[resources.AnalyticsAccountLink] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.AnalyticsAccountLink: + r"""Creates the link between the Analytics account and + the Google Marketing Platform organization. + + User needs to be an org user, and admin on the Analytics + account to create the link. If the account is already + linked to an organization, user needs to unlink the + account from the current organization, then try link + again. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_create_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + analytics_account_link = marketingplatform_admin_v1alpha.AnalyticsAccountLink() + analytics_account_link.analytics_account = "analytics_account_value" + + request = marketingplatform_admin_v1alpha.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + analytics_account_link=analytics_account_link, + ) + + # Make the request + response = await client.create_analytics_account_link(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.CreateAnalyticsAccountLinkRequest, dict]]): + The request object. Request message for + CreateAnalyticsAccountLink RPC. + parent (:class:`str`): + Required. The parent resource where this Analytics + account link will be created. Format: + organizations/{org_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + analytics_account_link (:class:`google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink`): + Required. The Analytics account link + to create. + + This corresponds to the ``analytics_account_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink: + A resource message representing the + link between a Google Analytics account + and a Google Marketing Platform + organization. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, analytics_account_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.CreateAnalyticsAccountLinkRequest + ): + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if analytics_account_link is not None: + request.analytics_account_link = analytics_account_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_analytics_account_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_analytics_account_link( + self, + request: Optional[ + Union[marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the AnalyticsAccountLink, which detaches the + Analytics account from the Google Marketing Platform + organization. + + User needs to be an org user, and admin on the Analytics + account in order to delete the link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_delete_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Make the request + await client.delete_analytics_account_link(request=request) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.DeleteAnalyticsAccountLinkRequest, dict]]): + The request object. Request message for + DeleteAnalyticsAccountLink RPC. + name (:class:`str`): + Required. The name of the Analytics account link to + delete. Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.DeleteAnalyticsAccountLinkRequest + ): + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_analytics_account_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_property_service_level( + self, + request: Optional[ + Union[marketingplatform_admin.SetPropertyServiceLevelRequest, dict] + ] = None, + *, + analytics_account_link: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> marketingplatform_admin.SetPropertyServiceLevelResponse: + r"""Updates the service level for an Analytics property. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_set_property_service_level(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + service_level="ANALYTICS_SERVICE_LEVEL_360", + ) + + # Make the request + response = await client.set_property_service_level(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelRequest, dict]]): + The request object. Request message for + SetPropertyServiceLevel RPC. + analytics_account_link (:class:`str`): + Required. The parent AnalyticsAccountLink scope where + this property is in. 
Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + + This corresponds to the ``analytics_account_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelResponse: + Response message for + SetPropertyServiceLevel RPC. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([analytics_account_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.SetPropertyServiceLevelRequest + ): + request = marketingplatform_admin.SetPropertyServiceLevelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if analytics_account_link is not None: + request.analytics_account_link = analytics_account_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.set_property_service_level + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("analytics_account_link", request.analytics_account_link),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "MarketingplatformAdminServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("MarketingplatformAdminServiceAsyncClient",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py new file mode 100644 index 000000000000..f31761153ab6 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py @@ -0,0 +1,1349 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ads.marketingplatform_admin_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service import ( + pagers, +) +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +from .transports.base import DEFAULT_CLIENT_INFO, MarketingplatformAdminServiceTransport +from .transports.grpc import MarketingplatformAdminServiceGrpcTransport +from .transports.grpc_asyncio import MarketingplatformAdminServiceGrpcAsyncIOTransport +from .transports.rest import MarketingplatformAdminServiceRestTransport + + +class MarketingplatformAdminServiceClientMeta(type): + """Metaclass for the MarketingplatformAdminService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[MarketingplatformAdminServiceTransport]] + _transport_registry["grpc"] = MarketingplatformAdminServiceGrpcTransport + _transport_registry[ + "grpc_asyncio" + ] = MarketingplatformAdminServiceGrpcAsyncIOTransport + _transport_registry["rest"] = MarketingplatformAdminServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[MarketingplatformAdminServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class MarketingplatformAdminServiceClient( + metaclass=MarketingplatformAdminServiceClientMeta +): + """Service Interface for the Google Marketing Platform Admin + API. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "marketingplatformadmin.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "marketingplatformadmin.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MarketingplatformAdminServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MarketingplatformAdminServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MarketingplatformAdminServiceTransport: + """Returns the transport used by the client instance. + + Returns: + MarketingplatformAdminServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def account_path( + account: str, + ) -> str: + """Returns a fully-qualified account string.""" + return "accounts/{account}".format( + account=account, + ) + + @staticmethod + def parse_account_path(path: str) -> Dict[str, str]: + """Parses a account path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def analytics_account_link_path( + organization: str, + analytics_account_link: str, + ) -> str: + """Returns a fully-qualified analytics_account_link string.""" + return "organizations/{organization}/analyticsAccountLinks/{analytics_account_link}".format( + organization=organization, + analytics_account_link=analytics_account_link, + ) + + @staticmethod + def parse_analytics_account_link_path(path: str) -> Dict[str, str]: + """Parses a analytics_account_link path into its component segments.""" + m = re.match( + r"^organizations/(?P.+?)/analyticsAccountLinks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_organization_path(path: str) -> Dict[str, str]: + """Parses a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def property_path( + 
property: str, + ) -> str: + """Returns a fully-qualified property string.""" + return "properties/{property}".format( + property=property, + ) + + @staticmethod + def parse_property_path(path: str) -> Dict[str, str]: + """Parses a property path into its component segments.""" + m = re.match(r"^properties/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component 
segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. 

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """

        warnings.warn(
            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
            DeprecationWarning,
        )
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_client_cert not in ("true", "false"):
            raise ValueError(
                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
            )
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError(
                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
            )

        # Figure out the client cert source to use.
        client_cert_source = None
        if use_client_cert == "true":
            if client_options.client_cert_source:
                client_cert_source = client_options.client_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()

        # Figure out which api endpoint to use.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        elif use_mtls_endpoint == "always" or (
            use_mtls_endpoint == "auto" and client_cert_source
        ):
            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = cls.DEFAULT_ENDPOINT

        return api_endpoint, client_cert_source

    @staticmethod
    def _read_environment_variables():
        """Returns the environment variables used by the client.

        Returns:
            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.

        Raises:
            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
                any of ["true", "false"].
            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
                is not any of ["auto", "never", "always"].
        """
        # NOTE: values are lower-cased before validation, so e.g. "TRUE" is
        # accepted here.
        use_client_cert = os.getenv(
            "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
        ).lower()
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
        if use_client_cert not in ("true", "false"):
            raise ValueError(
                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
            )
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError(
                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
            )
        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env

    @staticmethod
    def _get_client_cert_source(provided_cert_source, use_cert_flag):
        """Return the client cert source to be used by the client.

        Args:
            provided_cert_source (bytes): The client certificate source provided.
            use_cert_flag (bool): A flag indicating whether to use the client certificate.

        Returns:
            bytes or None: The client cert source to be used by the client.
        """
        # An explicitly provided cert source wins over the default one.
        client_cert_source = None
        if use_cert_flag:
            if provided_cert_source:
                client_cert_source = provided_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()
        return client_cert_source

    @staticmethod
    def _get_api_endpoint(
        api_override, client_cert_source, universe_domain, use_mtls_endpoint
    ):
        """Return the API endpoint used by the client.

        Args:
            api_override (str): The API endpoint override. If specified, this is always
                the return value of this function and the other arguments are not used.
            client_cert_source (bytes): The client certificate source used by the client.
            universe_domain (str): The universe domain used by the client.
            use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters.
                Possible values are "always", "auto", or "never".

        Returns:
            str: The API endpoint to be used by the client.
        """
        if api_override is not None:
            api_endpoint = api_override
        elif use_mtls_endpoint == "always" or (
            use_mtls_endpoint == "auto" and client_cert_source
        ):
            # mTLS is only served from the default (googleapis.com) universe.
            _default_universe = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE
            if universe_domain != _default_universe:
                raise MutualTLSChannelError(
                    f"mTLS is not supported in any universe other than {_default_universe}."
                )
            api_endpoint = MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = (
                MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(
                    UNIVERSE_DOMAIN=universe_domain
                )
            )
        return api_endpoint

    @staticmethod
    def _get_universe_domain(
        client_universe_domain: Optional[str], universe_domain_env: Optional[str]
    ) -> str:
        """Return the universe domain used by the client.

        Args:
            client_universe_domain (Optional[str]): The universe domain configured via the client options.
            universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.

        Returns:
            str: The universe domain to be used by the client.

        Raises:
            ValueError: If the universe domain is an empty string.
        """
        # Precedence: explicit client option > environment variable > default.
        universe_domain = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE
        if client_universe_domain is not None:
            universe_domain = client_universe_domain
        elif universe_domain_env is not None:
            universe_domain = universe_domain_env
        if len(universe_domain.strip()) == 0:
            raise ValueError("Universe Domain cannot be an empty string.")
        return universe_domain

    @staticmethod
    def _compare_universes(
        client_universe: str, credentials: ga_credentials.Credentials
    ) -> bool:
        """Returns True iff the universe domains used by the client and credentials match.

        Args:
            client_universe (str): The universe domain configured via the client options.
            credentials (ga_credentials.Credentials): The credentials being used in the client.

        Returns:
            bool: True iff client_universe matches the universe in credentials.

        Raises:
            ValueError: when client_universe does not match the universe in credentials.
        """

        default_universe = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE
        # Credentials without a universe_domain attribute are assumed to be in
        # the default universe.
        credentials_universe = getattr(credentials, "universe_domain", default_universe)

        if client_universe != credentials_universe:
            raise ValueError(
                "The configured universe domain "
                f"({client_universe}) does not match the universe domain "
                f"found in the credentials ({credentials_universe}). "
                "If you haven't configured the universe domain explicitly, "
                f"`{default_universe}` is the default."
            )
        return True

    def _validate_universe_domain(self):
        """Validates client's and credentials' universe domains are consistent.

        Returns:
            bool: True iff the configured universe domain is valid.

        Raises:
            ValueError: If the configured universe domain is not valid.
        """
        # A successful (True) validation is cached on the instance, so the
        # comparison only runs on the first call.
        self._is_universe_domain_valid = (
            self._is_universe_domain_valid
            or MarketingplatformAdminServiceClient._compare_universes(
                self.universe_domain, self.transport._credentials
            )
        )
        return self._is_universe_domain_valid

    @property
    def api_endpoint(self):
        """Return the API endpoint used by the client instance.

        Returns:
            str: The API endpoint used by the client instance.
        """
        return self._api_endpoint

    @property
    def universe_domain(self) -> str:
        """Return the universe domain used by the client instance.

        Returns:
            str: The universe domain used by the client instance.
+ """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + MarketingplatformAdminServiceTransport, + Callable[..., MarketingplatformAdminServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the marketingplatform admin service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,MarketingplatformAdminServiceTransport,Callable[..., MarketingplatformAdminServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MarketingplatformAdminServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = MarketingplatformAdminServiceClient._read_environment_variables() + self._client_cert_source = ( + MarketingplatformAdminServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = ( + MarketingplatformAdminServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe 
domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance( + transport, MarketingplatformAdminServiceTransport + ) + if transport_provided: + # transport is a MarketingplatformAdminServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(MarketingplatformAdminServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or MarketingplatformAdminServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[MarketingplatformAdminServiceTransport], + Callable[..., MarketingplatformAdminServiceTransport], + ] = ( + MarketingplatformAdminServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., MarketingplatformAdminServiceTransport], transport + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + 
                credentials=credentials,
                credentials_file=self._client_options.credentials_file,
                host=self._api_endpoint,
                scopes=self._client_options.scopes,
                client_cert_source_for_mtls=self._client_cert_source,
                quota_project_id=self._client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=self._client_options.api_audience,
            )

    def get_organization(
        self,
        request: Optional[
            Union[marketingplatform_admin.GetOrganizationRequest, dict]
        ] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> resources.Organization:
        r"""Lookup for a single organization.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.ads import marketingplatform_admin_v1alpha

            def sample_get_organization():
                # Create a client
                client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient()

                # Initialize request argument(s)
                request = marketingplatform_admin_v1alpha.GetOrganizationRequest(
                    name="name_value",
                )

                # Make the request
                response = client.get_organization(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.ads.marketingplatform_admin_v1alpha.types.GetOrganizationRequest, dict]):
                The request object. Request message for GetOrganization
                RPC.
            name (str):
                Required. The name of the Organization to retrieve.
                Format: organizations/{org_id}

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.ads.marketingplatform_admin_v1alpha.types.Organization:
                A resource message representing a
                Google Marketing Platform organization.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, marketingplatform_admin.GetOrganizationRequest):
            request = marketingplatform_admin.GetOrganizationRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if name is not None:
                request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get_organization]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def list_analytics_account_links(
        self,
        request: Optional[
            Union[marketingplatform_admin.ListAnalyticsAccountLinksRequest, dict]
        ] = None,
        *,
        parent: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> pagers.ListAnalyticsAccountLinksPager:
        r"""Lists the Google Analytics accounts link to the
        specified Google Marketing Platform organization.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.ads import marketingplatform_admin_v1alpha

            def sample_list_analytics_account_links():
                # Create a client
                client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient()

                # Initialize request argument(s)
                request = marketingplatform_admin_v1alpha.ListAnalyticsAccountLinksRequest(
                    parent="parent_value",
                )

                # Make the request
                page_result = client.list_analytics_account_links(request=request)

                # Handle the response
                for response in page_result:
                    print(response)

        Args:
            request (Union[google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest, dict]):
                The request object. Request message for
                ListAnalyticsAccountLinks RPC.
            parent (str):
                Required. The parent organization, which owns this
                collection of Analytics account links. Format:
                organizations/{org_id}

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListAnalyticsAccountLinksPager:
                Response message for
                ListAnalyticsAccountLinks RPC.
                Iterating over this object will yield
                results and resolve additional pages
                automatically.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(
            request, marketingplatform_admin.ListAnalyticsAccountLinksRequest
        ):
            request = marketingplatform_admin.ListAnalyticsAccountLinksRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if parent is not None:
                request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[
            self._transport.list_analytics_account_links
        ]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method.
        response = pagers.ListAnalyticsAccountLinksPager(
            method=rpc,
            request=request,
            response=response,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def create_analytics_account_link(
        self,
        request: Optional[
            Union[marketingplatform_admin.CreateAnalyticsAccountLinkRequest, dict]
        ] = None,
        *,
        parent: Optional[str] = None,
        analytics_account_link: Optional[resources.AnalyticsAccountLink] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> resources.AnalyticsAccountLink:
        r"""Creates the link between the Analytics account and
        the Google Marketing Platform organization.

        User needs to be an org user, and admin on the Analytics
        account to create the link. If the account is already
        linked to an organization, user needs to unlink the
        account from the current organization, then try link
        again.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + def sample_create_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + analytics_account_link = marketingplatform_admin_v1alpha.AnalyticsAccountLink() + analytics_account_link.analytics_account = "analytics_account_value" + + request = marketingplatform_admin_v1alpha.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + analytics_account_link=analytics_account_link, + ) + + # Make the request + response = client.create_analytics_account_link(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.marketingplatform_admin_v1alpha.types.CreateAnalyticsAccountLinkRequest, dict]): + The request object. Request message for + CreateAnalyticsAccountLink RPC. + parent (str): + Required. The parent resource where this Analytics + account link will be created. Format: + organizations/{org_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + analytics_account_link (google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink): + Required. The Analytics account link + to create. + + This corresponds to the ``analytics_account_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink: + A resource message representing the + link between a Google Analytics account + and a Google Marketing Platform + organization. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, analytics_account_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.CreateAnalyticsAccountLinkRequest + ): + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if analytics_account_link is not None: + request.analytics_account_link = analytics_account_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_analytics_account_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_analytics_account_link( + self, + request: Optional[ + Union[marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the AnalyticsAccountLink, which detaches the + Analytics account from the Google Marketing Platform + organization. + + User needs to be an org user, and admin on the Analytics + account in order to delete the link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + def sample_delete_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Make the request + client.delete_analytics_account_link(request=request) + + Args: + request (Union[google.ads.marketingplatform_admin_v1alpha.types.DeleteAnalyticsAccountLinkRequest, dict]): + The request object. Request message for + DeleteAnalyticsAccountLink RPC. + name (str): + Required. The name of the Analytics account link to + delete. Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.DeleteAnalyticsAccountLinkRequest + ): + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_analytics_account_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_property_service_level( + self, + request: Optional[ + Union[marketingplatform_admin.SetPropertyServiceLevelRequest, dict] + ] = None, + *, + analytics_account_link: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> marketingplatform_admin.SetPropertyServiceLevelResponse: + r"""Updates the service level for an Analytics property. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + def sample_set_property_service_level(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + service_level="ANALYTICS_SERVICE_LEVEL_360", + ) + + # Make the request + response = client.set_property_service_level(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelRequest, dict]): + The request object. Request message for + SetPropertyServiceLevel RPC. + analytics_account_link (str): + Required. The parent AnalyticsAccountLink scope where + this property is in. 
Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + + This corresponds to the ``analytics_account_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelResponse: + Response message for + SetPropertyServiceLevel RPC. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([analytics_account_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.SetPropertyServiceLevelRequest + ): + request = marketingplatform_admin.SetPropertyServiceLevelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if analytics_account_link is not None: + request.analytics_account_link = analytics_account_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.set_property_service_level + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("analytics_account_link", request.analytics_account_link),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "MarketingplatformAdminServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("MarketingplatformAdminServiceClient",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/pagers.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/pagers.py new file mode 100644 index 000000000000..bed8bd431770 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/pagers.py @@ -0,0 +1,208 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + + +class ListAnalyticsAccountLinksPager: + """A pager for iterating through ``list_analytics_account_links`` requests. + + This class thinly wraps an initial + :class:`google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``analytics_account_links`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAnalyticsAccountLinks`` requests and continue to iterate + through the ``analytics_account_links`` field on the + corresponding responses. + + All the usual :class:`google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., marketingplatform_admin.ListAnalyticsAccountLinksResponse + ], + request: marketingplatform_admin.ListAnalyticsAccountLinksRequest, + response: marketingplatform_admin.ListAnalyticsAccountLinksResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest): + The initial request object. + response (google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = marketingplatform_admin.ListAnalyticsAccountLinksRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[marketingplatform_admin.ListAnalyticsAccountLinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[resources.AnalyticsAccountLink]: + for page in self.pages: + yield from page.analytics_account_links + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAnalyticsAccountLinksAsyncPager: + """A pager for iterating through ``list_analytics_account_links`` requests. + + This class thinly wraps an initial + :class:`google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``analytics_account_links`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAnalyticsAccountLinks`` requests and continue to iterate + through the ``analytics_account_links`` field on the + corresponding responses. + + All the usual :class:`google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[marketingplatform_admin.ListAnalyticsAccountLinksResponse] + ], + request: marketingplatform_admin.ListAnalyticsAccountLinksRequest, + response: marketingplatform_admin.ListAnalyticsAccountLinksResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest): + The initial request object. + response (google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = marketingplatform_admin.ListAnalyticsAccountLinksRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[marketingplatform_admin.ListAnalyticsAccountLinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.AnalyticsAccountLink]: + async def async_generator(): + async for page in self.pages: + for response in page.analytics_account_links: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/__init__.py new file mode 100644 index 000000000000..205d647ea99a --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MarketingplatformAdminServiceTransport +from .grpc import MarketingplatformAdminServiceGrpcTransport +from .grpc_asyncio import MarketingplatformAdminServiceGrpcAsyncIOTransport +from .rest import ( + MarketingplatformAdminServiceRestInterceptor, + MarketingplatformAdminServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[MarketingplatformAdminServiceTransport]] +_transport_registry["grpc"] = MarketingplatformAdminServiceGrpcTransport +_transport_registry["grpc_asyncio"] = MarketingplatformAdminServiceGrpcAsyncIOTransport +_transport_registry["rest"] = MarketingplatformAdminServiceRestTransport + +__all__ = ( + "MarketingplatformAdminServiceTransport", + "MarketingplatformAdminServiceGrpcTransport", + "MarketingplatformAdminServiceGrpcAsyncIOTransport", + "MarketingplatformAdminServiceRestTransport", + "MarketingplatformAdminServiceRestInterceptor", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/base.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/base.py new file mode 100644 index 000000000000..6f70b5c211e1 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/base.py @@ -0,0 +1,232 @@ 
+# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.ads.marketingplatform_admin_v1alpha import gapic_version as package_version +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class MarketingplatformAdminServiceTransport(abc.ABC): + """Abstract transport class for MarketingplatformAdminService.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ) + + DEFAULT_HOST: str = "marketingplatformadmin.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + 
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'marketingplatformadmin.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.get_organization: gapic_v1.method.wrap_method( + self.get_organization, + default_timeout=None, + client_info=client_info, + ), + self.list_analytics_account_links: gapic_v1.method.wrap_method( + self.list_analytics_account_links, + default_timeout=None, + client_info=client_info, + ), + self.create_analytics_account_link: gapic_v1.method.wrap_method( + self.create_analytics_account_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_analytics_account_link: gapic_v1.method.wrap_method( + self.delete_analytics_account_link, + default_timeout=None, + client_info=client_info, + ), + self.set_property_service_level: gapic_v1.method.wrap_method( + self.set_property_service_level, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def get_organization( + self, + ) -> Callable[ + [marketingplatform_admin.GetOrganizationRequest], + Union[resources.Organization, Awaitable[resources.Organization]], + ]: + raise NotImplementedError() + + @property + def list_analytics_account_links( + self, + ) -> Callable[ + [marketingplatform_admin.ListAnalyticsAccountLinksRequest], + Union[ + marketingplatform_admin.ListAnalyticsAccountLinksResponse, + Awaitable[marketingplatform_admin.ListAnalyticsAccountLinksResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.CreateAnalyticsAccountLinkRequest], + Union[ + resources.AnalyticsAccountLink, Awaitable[resources.AnalyticsAccountLink] + ], + ]: + raise NotImplementedError() + + @property + def delete_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.DeleteAnalyticsAccountLinkRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def set_property_service_level( + self, + ) -> Callable[ + [marketingplatform_admin.SetPropertyServiceLevelRequest], + Union[ + marketingplatform_admin.SetPropertyServiceLevelResponse, + Awaitable[marketingplatform_admin.SetPropertyServiceLevelResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("MarketingplatformAdminServiceTransport",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc.py new file mode 100644 index 000000000000..e8f3656c2d06 --- /dev/null +++ 
b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc.py @@ -0,0 +1,412 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +from .base import DEFAULT_CLIENT_INFO, MarketingplatformAdminServiceTransport + + +class MarketingplatformAdminServiceGrpcTransport( + MarketingplatformAdminServiceTransport +): + """gRPC backend transport for MarketingplatformAdminService. + + Service Interface for the Google Marketing Platform Admin + API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'marketingplatformadmin.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_organization( + self, + ) -> Callable[ + [marketingplatform_admin.GetOrganizationRequest], resources.Organization + ]: + r"""Return a callable for the get organization method over gRPC. + + Lookup for a single organization. + + Returns: + Callable[[~.GetOrganizationRequest], + ~.Organization]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_organization" not in self._stubs: + self._stubs["get_organization"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/GetOrganization", + request_serializer=marketingplatform_admin.GetOrganizationRequest.serialize, + response_deserializer=resources.Organization.deserialize, + ) + return self._stubs["get_organization"] + + @property + def list_analytics_account_links( + self, + ) -> Callable[ + [marketingplatform_admin.ListAnalyticsAccountLinksRequest], + marketingplatform_admin.ListAnalyticsAccountLinksResponse, + ]: + r"""Return a callable for the list analytics account links method over gRPC. + + Lists the Google Analytics accounts link to the + specified Google Marketing Platform organization. + + Returns: + Callable[[~.ListAnalyticsAccountLinksRequest], + ~.ListAnalyticsAccountLinksResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_analytics_account_links" not in self._stubs: + self._stubs["list_analytics_account_links"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/ListAnalyticsAccountLinks", + request_serializer=marketingplatform_admin.ListAnalyticsAccountLinksRequest.serialize, + response_deserializer=marketingplatform_admin.ListAnalyticsAccountLinksResponse.deserialize, + ) + return self._stubs["list_analytics_account_links"] + + @property + def create_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.CreateAnalyticsAccountLinkRequest], + resources.AnalyticsAccountLink, + ]: + r"""Return a callable for the create analytics account link method over gRPC. + + Creates the link between the Analytics account and + the Google Marketing Platform organization. + + User needs to be an org user, and admin on the Analytics + account to create the link. If the account is already + linked to an organization, user needs to unlink the + account from the current organization, then try link + again. + + Returns: + Callable[[~.CreateAnalyticsAccountLinkRequest], + ~.AnalyticsAccountLink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_analytics_account_link" not in self._stubs: + self._stubs[ + "create_analytics_account_link" + ] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/CreateAnalyticsAccountLink", + request_serializer=marketingplatform_admin.CreateAnalyticsAccountLinkRequest.serialize, + response_deserializer=resources.AnalyticsAccountLink.deserialize, + ) + return self._stubs["create_analytics_account_link"] + + @property + def delete_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.DeleteAnalyticsAccountLinkRequest], empty_pb2.Empty + ]: + r"""Return a callable for the delete analytics account link method over gRPC. + + Deletes the AnalyticsAccountLink, which detaches the + Analytics account from the Google Marketing Platform + organization. + + User needs to be an org user, and admin on the Analytics + account in order to delete the link. + + Returns: + Callable[[~.DeleteAnalyticsAccountLinkRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_analytics_account_link" not in self._stubs: + self._stubs[ + "delete_analytics_account_link" + ] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/DeleteAnalyticsAccountLink", + request_serializer=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_analytics_account_link"] + + @property + def set_property_service_level( + self, + ) -> Callable[ + [marketingplatform_admin.SetPropertyServiceLevelRequest], + marketingplatform_admin.SetPropertyServiceLevelResponse, + ]: + r"""Return a callable for the set property service level method over gRPC. 
+ + Updates the service level for an Analytics property. + + Returns: + Callable[[~.SetPropertyServiceLevelRequest], + ~.SetPropertyServiceLevelResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_property_service_level" not in self._stubs: + self._stubs["set_property_service_level"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/SetPropertyServiceLevel", + request_serializer=marketingplatform_admin.SetPropertyServiceLevelRequest.serialize, + response_deserializer=marketingplatform_admin.SetPropertyServiceLevelResponse.deserialize, + ) + return self._stubs["set_property_service_level"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("MarketingplatformAdminServiceGrpcTransport",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc_asyncio.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..caef725e70be --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc_asyncio.py @@ -0,0 +1,444 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +from .base import DEFAULT_CLIENT_INFO, MarketingplatformAdminServiceTransport +from .grpc import MarketingplatformAdminServiceGrpcTransport + + +class MarketingplatformAdminServiceGrpcAsyncIOTransport( + MarketingplatformAdminServiceTransport +): + """gRPC AsyncIO backend transport for MarketingplatformAdminService. + + Service Interface for the Google Marketing Platform Admin + API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'marketingplatformadmin.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the 
credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_organization( + self, + ) -> Callable[ + [marketingplatform_admin.GetOrganizationRequest], + Awaitable[resources.Organization], + ]: + r"""Return a callable for the get organization method over gRPC. + + Lookup for a single organization. + + Returns: + Callable[[~.GetOrganizationRequest], + Awaitable[~.Organization]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_organization" not in self._stubs: + self._stubs["get_organization"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/GetOrganization", + request_serializer=marketingplatform_admin.GetOrganizationRequest.serialize, + response_deserializer=resources.Organization.deserialize, + ) + return self._stubs["get_organization"] + + @property + def list_analytics_account_links( + self, + ) -> Callable[ + [marketingplatform_admin.ListAnalyticsAccountLinksRequest], + Awaitable[marketingplatform_admin.ListAnalyticsAccountLinksResponse], + ]: + r"""Return a callable for the list analytics account links method over gRPC. 
+ + Lists the Google Analytics accounts link to the + specified Google Marketing Platform organization. + + Returns: + Callable[[~.ListAnalyticsAccountLinksRequest], + Awaitable[~.ListAnalyticsAccountLinksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_analytics_account_links" not in self._stubs: + self._stubs["list_analytics_account_links"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/ListAnalyticsAccountLinks", + request_serializer=marketingplatform_admin.ListAnalyticsAccountLinksRequest.serialize, + response_deserializer=marketingplatform_admin.ListAnalyticsAccountLinksResponse.deserialize, + ) + return self._stubs["list_analytics_account_links"] + + @property + def create_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.CreateAnalyticsAccountLinkRequest], + Awaitable[resources.AnalyticsAccountLink], + ]: + r"""Return a callable for the create analytics account link method over gRPC. + + Creates the link between the Analytics account and + the Google Marketing Platform organization. + + User needs to be an org user, and admin on the Analytics + account to create the link. If the account is already + linked to an organization, user needs to unlink the + account from the current organization, then try link + again. + + Returns: + Callable[[~.CreateAnalyticsAccountLinkRequest], + Awaitable[~.AnalyticsAccountLink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_analytics_account_link" not in self._stubs: + self._stubs[ + "create_analytics_account_link" + ] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/CreateAnalyticsAccountLink", + request_serializer=marketingplatform_admin.CreateAnalyticsAccountLinkRequest.serialize, + response_deserializer=resources.AnalyticsAccountLink.deserialize, + ) + return self._stubs["create_analytics_account_link"] + + @property + def delete_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.DeleteAnalyticsAccountLinkRequest], + Awaitable[empty_pb2.Empty], + ]: + r"""Return a callable for the delete analytics account link method over gRPC. + + Deletes the AnalyticsAccountLink, which detaches the + Analytics account from the Google Marketing Platform + organization. + + User needs to be an org user, and admin on the Analytics + account in order to delete the link. + + Returns: + Callable[[~.DeleteAnalyticsAccountLinkRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_analytics_account_link" not in self._stubs: + self._stubs[ + "delete_analytics_account_link" + ] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/DeleteAnalyticsAccountLink", + request_serializer=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_analytics_account_link"] + + @property + def set_property_service_level( + self, + ) -> Callable[ + [marketingplatform_admin.SetPropertyServiceLevelRequest], + Awaitable[marketingplatform_admin.SetPropertyServiceLevelResponse], + ]: + r"""Return a callable for the set property service level method over gRPC. + + Updates the service level for an Analytics property. + + Returns: + Callable[[~.SetPropertyServiceLevelRequest], + Awaitable[~.SetPropertyServiceLevelResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_property_service_level" not in self._stubs: + self._stubs["set_property_service_level"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/SetPropertyServiceLevel", + request_serializer=marketingplatform_admin.SetPropertyServiceLevelRequest.serialize, + response_deserializer=marketingplatform_admin.SetPropertyServiceLevelResponse.deserialize, + ) + return self._stubs["set_property_service_level"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_organization: gapic_v1.method_async.wrap_method( + self.get_organization, + default_timeout=None, + client_info=client_info, + ), + self.list_analytics_account_links: gapic_v1.method_async.wrap_method( + self.list_analytics_account_links, + default_timeout=None, + client_info=client_info, + ), + self.create_analytics_account_link: gapic_v1.method_async.wrap_method( + self.create_analytics_account_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_analytics_account_link: gapic_v1.method_async.wrap_method( + self.delete_analytics_account_link, + default_timeout=None, + client_info=client_info, + ), + self.set_property_service_level: gapic_v1.method_async.wrap_method( + self.set_property_service_level, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("MarketingplatformAdminServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py new file mode 100644 index 000000000000..b3894ca05fc4 --- /dev/null +++ 
b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py @@ -0,0 +1,858 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import MarketingplatformAdminServiceTransport + +DEFAULT_CLIENT_INFO = 
gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class MarketingplatformAdminServiceRestInterceptor: + """Interceptor for MarketingplatformAdminService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MarketingplatformAdminServiceRestTransport. + + .. code-block:: python + class MyCustomMarketingplatformAdminServiceInterceptor(MarketingplatformAdminServiceRestInterceptor): + def pre_create_analytics_account_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_analytics_account_link(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_analytics_account_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_organization(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_organization(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_analytics_account_links(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_analytics_account_links(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_property_service_level(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_property_service_level(self, response): + logging.log(f"Received response: {response}") + return response + + transport = 
MarketingplatformAdminServiceRestTransport(interceptor=MyCustomMarketingplatformAdminServiceInterceptor()) + client = MarketingplatformAdminServiceClient(transport=transport) + + + """ + + def pre_create_analytics_account_link( + self, + request: marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for create_analytics_account_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. + """ + return request, metadata + + def post_create_analytics_account_link( + self, response: resources.AnalyticsAccountLink + ) -> resources.AnalyticsAccountLink: + """Post-rpc interceptor for create_analytics_account_link + + Override in a subclass to manipulate the response + after it is returned by the MarketingplatformAdminService server but before + it is returned to user code. + """ + return response + + def pre_delete_analytics_account_link( + self, + request: marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for delete_analytics_account_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. + """ + return request, metadata + + def pre_get_organization( + self, + request: marketingplatform_admin.GetOrganizationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.GetOrganizationRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_organization + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. 
+ """ + return request, metadata + + def post_get_organization( + self, response: resources.Organization + ) -> resources.Organization: + """Post-rpc interceptor for get_organization + + Override in a subclass to manipulate the response + after it is returned by the MarketingplatformAdminService server but before + it is returned to user code. + """ + return response + + def pre_list_analytics_account_links( + self, + request: marketingplatform_admin.ListAnalyticsAccountLinksRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.ListAnalyticsAccountLinksRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_analytics_account_links + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. + """ + return request, metadata + + def post_list_analytics_account_links( + self, response: marketingplatform_admin.ListAnalyticsAccountLinksResponse + ) -> marketingplatform_admin.ListAnalyticsAccountLinksResponse: + """Post-rpc interceptor for list_analytics_account_links + + Override in a subclass to manipulate the response + after it is returned by the MarketingplatformAdminService server but before + it is returned to user code. + """ + return response + + def pre_set_property_service_level( + self, + request: marketingplatform_admin.SetPropertyServiceLevelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.SetPropertyServiceLevelRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for set_property_service_level + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. 
+ """ + return request, metadata + + def post_set_property_service_level( + self, response: marketingplatform_admin.SetPropertyServiceLevelResponse + ) -> marketingplatform_admin.SetPropertyServiceLevelResponse: + """Post-rpc interceptor for set_property_service_level + + Override in a subclass to manipulate the response + after it is returned by the MarketingplatformAdminService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class MarketingplatformAdminServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: MarketingplatformAdminServiceRestInterceptor + + +class MarketingplatformAdminServiceRestTransport( + MarketingplatformAdminServiceTransport +): + """REST backend transport for MarketingplatformAdminService. + + Service Interface for the Google Marketing Platform Admin + API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[MarketingplatformAdminServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'marketingplatformadmin.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = ( + interceptor or MarketingplatformAdminServiceRestInterceptor() + ) + self._prep_wrapped_messages(client_info) + + class _CreateAnalyticsAccountLink(MarketingplatformAdminServiceRestStub): + def __hash__(self): + return hash("CreateAnalyticsAccountLink") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.AnalyticsAccountLink: + r"""Call the create analytics account + link method over HTTP. + + Args: + request (~.marketingplatform_admin.CreateAnalyticsAccountLinkRequest): + The request object. Request message for + CreateAnalyticsAccountLink RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.AnalyticsAccountLink: + A resource message representing the + link between a Google Analytics account + and a Google Marketing Platform + organization. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=organizations/*}/analyticsAccountLinks", + "body": "analytics_account_link", + }, + ] + request, metadata = self._interceptor.pre_create_analytics_account_link( + request, metadata + ) + pb_request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.AnalyticsAccountLink() + pb_resp = resources.AnalyticsAccountLink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_analytics_account_link(resp) + return resp + + class _DeleteAnalyticsAccountLink(MarketingplatformAdminServiceRestStub): + def __hash__(self): + return hash("DeleteAnalyticsAccountLink") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete analytics account + link method over HTTP. + + Args: + request (~.marketingplatform_admin.DeleteAnalyticsAccountLinkRequest): + The request object. Request message for + DeleteAnalyticsAccountLink RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=organizations/*/analyticsAccountLinks/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_analytics_account_link( + request, metadata + ) + pb_request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetOrganization(MarketingplatformAdminServiceRestStub): + def __hash__(self): + return hash("GetOrganization") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: marketingplatform_admin.GetOrganizationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Organization: + r"""Call the get organization method over HTTP. 
+ + Args: + request (~.marketingplatform_admin.GetOrganizationRequest): + The request object. Request message for GetOrganization + RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Organization: + A resource message representing a + Google Marketing Platform organization. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=organizations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_organization( + request, metadata + ) + pb_request = marketingplatform_admin.GetOrganizationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Organization() + pb_resp = resources.Organization.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_organization(resp) + return resp + + class _ListAnalyticsAccountLinks(MarketingplatformAdminServiceRestStub): + def __hash__(self): + return hash("ListAnalyticsAccountLinks") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: marketingplatform_admin.ListAnalyticsAccountLinksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> marketingplatform_admin.ListAnalyticsAccountLinksResponse: + r"""Call the list analytics account + links method over HTTP. + + Args: + request (~.marketingplatform_admin.ListAnalyticsAccountLinksRequest): + The request object. Request message for + ListAnalyticsAccountLinks RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.marketingplatform_admin.ListAnalyticsAccountLinksResponse: + Response message for + ListAnalyticsAccountLinks RPC. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=organizations/*}/analyticsAccountLinks", + }, + ] + request, metadata = self._interceptor.pre_list_analytics_account_links( + request, metadata + ) + pb_request = marketingplatform_admin.ListAnalyticsAccountLinksRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + pb_resp = marketingplatform_admin.ListAnalyticsAccountLinksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_analytics_account_links(resp) + return resp + + class _SetPropertyServiceLevel(MarketingplatformAdminServiceRestStub): + def __hash__(self): + return hash("SetPropertyServiceLevel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: marketingplatform_admin.SetPropertyServiceLevelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> marketingplatform_admin.SetPropertyServiceLevelResponse: + r"""Call the set property service + level method over HTTP. + + Args: + request (~.marketingplatform_admin.SetPropertyServiceLevelRequest): + The request object. Request message for + SetPropertyServiceLevel RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.marketingplatform_admin.SetPropertyServiceLevelResponse: + Response message for + SetPropertyServiceLevel RPC. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{analytics_account_link=organizations/*/analyticsAccountLinks/*}:setPropertyServiceLevel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_property_service_level( + request, metadata + ) + pb_request = marketingplatform_admin.SetPropertyServiceLevelRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = marketingplatform_admin.SetPropertyServiceLevelResponse() + pb_resp = marketingplatform_admin.SetPropertyServiceLevelResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_property_service_level(resp) + return resp + + @property + def create_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.CreateAnalyticsAccountLinkRequest], + resources.AnalyticsAccountLink, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateAnalyticsAccountLink(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.DeleteAnalyticsAccountLinkRequest], empty_pb2.Empty + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAnalyticsAccountLink(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_organization( + self, + ) -> Callable[ + [marketingplatform_admin.GetOrganizationRequest], resources.Organization + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetOrganization(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_analytics_account_links( + self, + ) -> Callable[ + [marketingplatform_admin.ListAnalyticsAccountLinksRequest], + marketingplatform_admin.ListAnalyticsAccountLinksResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListAnalyticsAccountLinks(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_property_service_level( + self, + ) -> Callable[ + [marketingplatform_admin.SetPropertyServiceLevelRequest], + marketingplatform_admin.SetPropertyServiceLevelResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetPropertyServiceLevel(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("MarketingplatformAdminServiceRestTransport",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/__init__.py new file mode 100644 index 000000000000..617c3bec15b6 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/__init__.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .marketingplatform_admin import ( + AnalyticsServiceLevel, + CreateAnalyticsAccountLinkRequest, + DeleteAnalyticsAccountLinkRequest, + GetOrganizationRequest, + ListAnalyticsAccountLinksRequest, + ListAnalyticsAccountLinksResponse, + SetPropertyServiceLevelRequest, + SetPropertyServiceLevelResponse, +) +from .resources import AnalyticsAccountLink, LinkVerificationState, Organization + +__all__ = ( + "CreateAnalyticsAccountLinkRequest", + "DeleteAnalyticsAccountLinkRequest", + "GetOrganizationRequest", + "ListAnalyticsAccountLinksRequest", + "ListAnalyticsAccountLinksResponse", + "SetPropertyServiceLevelRequest", + "SetPropertyServiceLevelResponse", + "AnalyticsServiceLevel", + "AnalyticsAccountLink", + "Organization", + "LinkVerificationState", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/marketingplatform_admin.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/marketingplatform_admin.py new file mode 100644 index 000000000000..a446e0c57b69 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/marketingplatform_admin.py @@ -0,0 +1,217 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import resources + +__protobuf__ = proto.module( + package="google.marketingplatform.admin.v1alpha", + manifest={ + "AnalyticsServiceLevel", + "GetOrganizationRequest", + "ListAnalyticsAccountLinksRequest", + "ListAnalyticsAccountLinksResponse", + "CreateAnalyticsAccountLinkRequest", + "DeleteAnalyticsAccountLinkRequest", + "SetPropertyServiceLevelRequest", + "SetPropertyServiceLevelResponse", + }, +) + + +class AnalyticsServiceLevel(proto.Enum): + r"""Various levels of service for Google Analytics. + + Values: + ANALYTICS_SERVICE_LEVEL_UNSPECIFIED (0): + Service level unspecified. + ANALYTICS_SERVICE_LEVEL_STANDARD (1): + The standard version of Google Analytics. + ANALYTICS_SERVICE_LEVEL_360 (2): + The premium version of Google Analytics. + """ + ANALYTICS_SERVICE_LEVEL_UNSPECIFIED = 0 + ANALYTICS_SERVICE_LEVEL_STANDARD = 1 + ANALYTICS_SERVICE_LEVEL_360 = 2 + + +class GetOrganizationRequest(proto.Message): + r"""Request message for GetOrganization RPC. + + Attributes: + name (str): + Required. The name of the Organization to retrieve. Format: + organizations/{org_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListAnalyticsAccountLinksRequest(proto.Message): + r"""Request message for ListAnalyticsAccountLinks RPC. + + Attributes: + parent (str): + Required. The parent organization, which owns this + collection of Analytics account links. Format: + organizations/{org_id} + page_size (int): + Optional. The maximum number of Analytics + account links to return in one call. The service + may return fewer than this value. + + If unspecified, at most 50 Analytics account + links will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. + page_token (str): + Optional. 
A page token, received from a previous + ListAnalyticsAccountLinks call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListAnalyticsAccountLinks`` must match the call that + provided the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAnalyticsAccountLinksResponse(proto.Message): + r"""Response message for ListAnalyticsAccountLinks RPC. + + Attributes: + analytics_account_links (MutableSequence[google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink]): + Analytics account links in this organization. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + analytics_account_links: MutableSequence[ + resources.AnalyticsAccountLink + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.AnalyticsAccountLink, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateAnalyticsAccountLinkRequest(proto.Message): + r"""Request message for CreateAnalyticsAccountLink RPC. + + Attributes: + parent (str): + Required. The parent resource where this Analytics account + link will be created. Format: organizations/{org_id} + analytics_account_link (google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink): + Required. The Analytics account link to + create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + analytics_account_link: resources.AnalyticsAccountLink = proto.Field( + proto.MESSAGE, + number=2, + message=resources.AnalyticsAccountLink, + ) + + +class DeleteAnalyticsAccountLinkRequest(proto.Message): + r"""Request message for DeleteAnalyticsAccountLink RPC. 
+ + Attributes: + name (str): + Required. The name of the Analytics account link to delete. + Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class SetPropertyServiceLevelRequest(proto.Message): + r"""Request message for SetPropertyServiceLevel RPC. + + Attributes: + analytics_account_link (str): + Required. The parent AnalyticsAccountLink scope where this + property is in. Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + analytics_property (str): + Required. The Analytics property to change the ServiceLevel + setting. This field is the name of the Google Analytics + Admin API property resource. + + Format: + analyticsadmin.googleapis.com/properties/{property_id} + service_level (google.ads.marketingplatform_admin_v1alpha.types.AnalyticsServiceLevel): + Required. The service level to set for this + property. + """ + + analytics_account_link: str = proto.Field( + proto.STRING, + number=1, + ) + analytics_property: str = proto.Field( + proto.STRING, + number=2, + ) + service_level: "AnalyticsServiceLevel" = proto.Field( + proto.ENUM, + number=3, + enum="AnalyticsServiceLevel", + ) + + +class SetPropertyServiceLevelResponse(proto.Message): + r"""Response message for SetPropertyServiceLevel RPC.""" + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/resources.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/resources.py new file mode 100644 index 000000000000..420e17747cf7 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/resources.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.marketingplatform.admin.v1alpha", + manifest={ + "LinkVerificationState", + "Organization", + "AnalyticsAccountLink", + }, +) + + +class LinkVerificationState(proto.Enum): + r"""The verification state of the link between a product account + and a GMP organization. + + Values: + LINK_VERIFICATION_STATE_UNSPECIFIED (0): + The link state is unknown. + LINK_VERIFICATION_STATE_VERIFIED (1): + The link is established. + LINK_VERIFICATION_STATE_NOT_VERIFIED (2): + The link is requested, but hasn't been + approved by the product account admin. + """ + LINK_VERIFICATION_STATE_UNSPECIFIED = 0 + LINK_VERIFICATION_STATE_VERIFIED = 1 + LINK_VERIFICATION_STATE_NOT_VERIFIED = 2 + + +class Organization(proto.Message): + r"""A resource message representing a Google Marketing Platform + organization. + + Attributes: + name (str): + Identifier. The resource name of the GMP organization. + Format: organizations/{org_id} + display_name (str): + The human-readable name for the organization. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AnalyticsAccountLink(proto.Message): + r"""A resource message representing the link between a Google + Analytics account and a Google Marketing Platform organization. + + Attributes: + name (str): + Identifier. 
Resource name of this AnalyticsAccountLink. Note
+            the resource ID is the same as the ID of the Analytics
+            account.
+
+            Format:
+            organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id}
+            Example: "organizations/xyz/analyticsAccountLinks/1234".
+        analytics_account (str):
+            Required. Immutable. The resource name of the AnalyticsAdmin
+            API account. The account ID will be used as the ID of this
+            AnalyticsAccountLink resource, which will become the final
+            component of the resource name.
+
+            Format: analyticsadmin.googleapis.com/accounts/{account_id}
+        display_name (str):
+            Output only. The human-readable name for the
+            Analytics account.
+        link_verification_state (google.ads.marketingplatform_admin_v1alpha.types.LinkVerificationState):
+            Output only. The verification state of the
+            link between the Analytics account and the
+            parent organization.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    analytics_account: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    display_name: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    link_verification_state: "LinkVerificationState" = proto.Field(
+        proto.ENUM,
+        number=4,
+        enum="LinkVerificationState",
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-ads-marketingplatform-admin/mypy.ini b/packages/google-ads-marketingplatform-admin/mypy.ini
new file mode 100644
index 000000000000..574c5aed394b
--- /dev/null
+++ b/packages/google-ads-marketingplatform-admin/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.7
+namespace_packages = True
diff --git a/packages/google-ads-marketingplatform-admin/noxfile.py b/packages/google-ads-marketingplatform-admin/noxfile.py
new file mode 100644
index 000000000000..67b7265f7586
--- /dev/null
+++ b/packages/google-ads-marketingplatform-admin/noxfile.py
@@ -0,0 +1,452 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing 
+nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+    with open(
+        CURRENT_DIRECTORY
+        / "testing"
+        / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt",
+        encoding="utf-8",
+    ) as constraints_file:
+        constraints_text = constraints_file.read()
+
+    # Ignore leading whitespace and comment lines.
+    constraints_deps = [
+        match.group(1)
+        for match in re.finditer(
+            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
+        )
+    ]
+
+    session.install(*constraints_deps)
+
+    prerel_deps = [
+        "protobuf",
+        # dependency of grpc
+        "six",
+        "grpc-google-iam-v1",
+        "googleapis-common-protos",
+        "grpcio",
+        "grpcio-status",
+        "google-api-core",
+        "google-auth",
+        "proto-plus",
+        "google-cloud-testutils",
+        # dependencies of google-cloud-testutils
+        "click",
+    ]
+
+    for dep in prerel_deps:
+        session.install("--pre", "--no-deps", "--upgrade", dep)
+
+    # Remaining dependencies
+    other_deps = [
+        "requests",
+    ]
+    session.install(*other_deps)
+
+    # Print out prerelease package versions
+    session.run(
+        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+    )
+    session.run("python", "-c", "import grpc; print(grpc.__version__)")
+    session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
+
+    session.run(
+        "py.test",
+        "tests/unit",
+        env={
+            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+        },
+    )
diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py
new file mode 100644
index 000000000000..bfd28a483b92
--- /dev/null
+++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAnalyticsAccountLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_create_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + analytics_account_link = marketingplatform_admin_v1alpha.AnalyticsAccountLink() + analytics_account_link.analytics_account = "analytics_account_value" + + request = marketingplatform_admin_v1alpha.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + analytics_account_link=analytics_account_link, + ) + + # Make the request + response = await client.create_analytics_account_link(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py new file mode 100644 index 000000000000..6af1b08a5a3b --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAnalyticsAccountLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_create_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + analytics_account_link = marketingplatform_admin_v1alpha.AnalyticsAccountLink() + analytics_account_link.analytics_account = "analytics_account_value" + + request = marketingplatform_admin_v1alpha.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + analytics_account_link=analytics_account_link, + ) + + # Make the request + response = client.create_analytics_account_link(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py new file mode 100644 index 000000000000..c0b2c7e1ffa9 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAnalyticsAccountLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_delete_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Make the request + await client.delete_analytics_account_link(request=request) + + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py new file mode 100644 index 000000000000..8f1a794eacac --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAnalyticsAccountLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_delete_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Make the request + client.delete_analytics_account_link(request=request) + + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py new file mode 100644 index 000000000000..7666fa53e916 --- /dev/null +++ 
b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetOrganization +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_get_organization(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.GetOrganizationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_organization(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py new file mode 100644 index 000000000000..52b506c61914 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetOrganization +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_get_organization(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.GetOrganizationRequest( + name="name_value", + ) + + # Make the request + response = client.get_organization(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py new file mode 100644 index 000000000000..3837010ff87f --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py 
@@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAnalyticsAccountLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_list_analytics_account_links(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ListAnalyticsAccountLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_analytics_account_links(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py new file mode 100644 index 000000000000..af3ed458056a --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAnalyticsAccountLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_list_analytics_account_links(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ListAnalyticsAccountLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_analytics_account_links(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py new file mode 
100644 index 000000000000..b07e73cde9f5 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetPropertyServiceLevel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_set_property_service_level(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + service_level="ANALYTICS_SERVICE_LEVEL_360", + ) + + # Make the request + response = await client.set_property_service_level(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py new file mode 100644 index 000000000000..a742b4f50f64 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetPropertyServiceLevel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_set_property_service_level(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + service_level="ANALYTICS_SERVICE_LEVEL_360", + ) + + # Make the request + response = client.set_property_service_level(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json b/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json new file mode 100644 index 000000000000..72abc6186c7b --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json @@ -0,0 +1,822 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.marketingplatform.admin.v1alpha", + "version": "v1alpha" + } + ], + "language": "PYTHON", + "name": "google-ads-marketingplatform-admin", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.create_analytics_account_link", + 
"method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.CreateAnalyticsAccountLink", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "CreateAnalyticsAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.CreateAnalyticsAccountLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "analytics_account_link", + "type": "google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink", + "shortName": "create_analytics_account_link" + }, + "description": "Sample for CreateAnalyticsAccountLink", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.create_analytics_account_link", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.CreateAnalyticsAccountLink", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "CreateAnalyticsAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.CreateAnalyticsAccountLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "analytics_account_link", + "type": "google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink", + "shortName": "create_analytics_account_link" + }, + "description": "Sample for CreateAnalyticsAccountLink", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + 
], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.delete_analytics_account_link", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.DeleteAnalyticsAccountLink", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "DeleteAnalyticsAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.DeleteAnalyticsAccountLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_analytics_account_link" + }, + "description": "Sample for DeleteAnalyticsAccountLink", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.delete_analytics_account_link", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.DeleteAnalyticsAccountLink", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "DeleteAnalyticsAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.DeleteAnalyticsAccountLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_analytics_account_link" + }, + "description": "Sample for DeleteAnalyticsAccountLink", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.get_organization", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.GetOrganization", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "GetOrganization" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.GetOrganizationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.Organization", + "shortName": "get_organization" + }, + "description": "Sample for GetOrganization", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.get_organization", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.GetOrganization", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "GetOrganization" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.GetOrganizationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.Organization", + "shortName": "get_organization" + }, + "description": "Sample for GetOrganization", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.list_analytics_account_links", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.ListAnalyticsAccountLinks", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "ListAnalyticsAccountLinks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListAnalyticsAccountLinksAsyncPager", + "shortName": "list_analytics_account_links" + }, + "description": "Sample for ListAnalyticsAccountLinks", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + 
{ + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.list_analytics_account_links", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.ListAnalyticsAccountLinks", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "ListAnalyticsAccountLinks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListAnalyticsAccountLinksPager", + "shortName": "list_analytics_account_links" + }, + "description": "Sample for ListAnalyticsAccountLinks", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.set_property_service_level", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.SetPropertyServiceLevel", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "SetPropertyServiceLevel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelRequest" + }, + { + "name": "analytics_account_link", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelResponse", + "shortName": "set_property_service_level" + }, + "description": "Sample for SetPropertyServiceLevel", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_async", + "segments": [ + { + "end": 
53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.set_property_service_level", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.SetPropertyServiceLevel", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "SetPropertyServiceLevel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelRequest" + }, + { + "name": "analytics_account_link", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelResponse", + "shortName": "set_property_service_level" + }, + "description": "Sample for SetPropertyServiceLevel", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py" + } + ] +} diff --git a/packages/google-ads-marketingplatform-admin/scripts/decrypt-secrets.sh b/packages/google-ads-marketingplatform-admin/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. 
+if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-ads-marketingplatform-admin/scripts/fixup_marketingplatform_admin_v1alpha_keywords.py b/packages/google-ads-marketingplatform-admin/scripts/fixup_marketingplatform_admin_v1alpha_keywords.py new file mode 100644 index 000000000000..eb6dc67078de --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/scripts/fixup_marketingplatform_admin_v1alpha_keywords.py @@ -0,0 +1,180 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class marketingplatform_adminCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_analytics_account_link': ('parent', 'analytics_account_link', ), + 'delete_analytics_account_link': ('name', ), + 'get_organization': ('name', ), + 'list_analytics_account_links': ('parent', 'page_size', 'page_token', ), + 'set_property_service_level': ('analytics_account_link', 'analytics_property', 'service_level', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=marketingplatform_adminCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the marketingplatform_admin client library. 
+ +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-ads-marketingplatform-admin/setup.py b/packages/google-ads-marketingplatform-admin/setup.py new file mode 100644 index 000000000000..bd6f637c0bf8 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/setup.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-ads-marketingplatform-admin" + + +description = "Google Ads Marketingplatform Admin API client library" + +version = None + +with open( + os.path.join(package_root, "google/ads/marketingplatform_admin/gapic_version.py") +) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert len(version_candidates) == 1 + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-marketingplatform-admin" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") 
+] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-ads-marketingplatform-admin/testing/.gitignore b/packages/google-ads-marketingplatform-admin/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.10.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.11.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.12.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.7.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.7.txt new file mode 100644 index 000000000000..fc812592b0ee --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.8.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.9.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/tests/__init__.py b/packages/google-ads-marketingplatform-admin/tests/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ads-marketingplatform-admin/tests/unit/__init__.py b/packages/google-ads-marketingplatform-admin/tests/unit/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ads-marketingplatform-admin/tests/unit/gapic/__init__.py b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/__init__.py b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py new file mode 100644 index 000000000000..c0d88b7ea387 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py @@ -0,0 +1,5969 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service import ( + MarketingplatformAdminServiceAsyncClient, + MarketingplatformAdminServiceClient, + pagers, + transports, +) +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MarketingplatformAdminServiceClient._get_default_mtls_endpoint(None) is None + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint( + api_mtls_endpoint + ) + == api_mtls_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert 
MarketingplatformAdminServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + MarketingplatformAdminServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + MarketingplatformAdminServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ( + MarketingplatformAdminServiceClient._get_client_cert_source(None, False) is None + ) + assert ( + 
MarketingplatformAdminServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + MarketingplatformAdminServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + MarketingplatformAdminServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + MarketingplatformAdminServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + MarketingplatformAdminServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + 
MarketingplatformAdminServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + MarketingplatformAdminServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + MarketingplatformAdminServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + MarketingplatformAdminServiceClient._get_universe_domain( + None, universe_domain_env + ) + == universe_domain_env + ) + assert ( + MarketingplatformAdminServiceClient._get_universe_domain(None, None) + == MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + MarketingplatformAdminServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. 
+ google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MarketingplatformAdminServiceClient, "grpc"), + (MarketingplatformAdminServiceAsyncClient, "grpc_asyncio"), + (MarketingplatformAdminServiceClient, "rest"), + ], +) +def test_marketingplatform_admin_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "marketingplatformadmin.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://marketingplatformadmin.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.MarketingplatformAdminServiceGrpcTransport, "grpc"), + (transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.MarketingplatformAdminServiceRestTransport, "rest"), + ], +) +def test_marketingplatform_admin_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = 
transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MarketingplatformAdminServiceClient, "grpc"), + (MarketingplatformAdminServiceAsyncClient, "grpc_asyncio"), + (MarketingplatformAdminServiceClient, "rest"), + ], +) +def test_marketingplatform_admin_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "marketingplatformadmin.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://marketingplatformadmin.googleapis.com" + ) + + +def test_marketingplatform_admin_service_client_get_transport_class(): + transport = MarketingplatformAdminServiceClient.get_transport_class() + available_transports = [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceRestTransport, + ] + assert transport in available_transports + + transport = MarketingplatformAdminServiceClient.get_transport_class("grpc") + assert transport == transports.MarketingplatformAdminServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + ), + ( + MarketingplatformAdminServiceAsyncClient, + 
transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + MarketingplatformAdminServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceAsyncClient), +) +def test_marketingplatform_admin_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + MarketingplatformAdminServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + MarketingplatformAdminServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + "true", + ), + ( + MarketingplatformAdminServiceAsyncClient, + 
transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + "false", + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + "true", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + MarketingplatformAdminServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_marketingplatform_admin_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [MarketingplatformAdminServiceClient, MarketingplatformAdminServiceAsyncClient], +) +@mock.patch.object( + MarketingplatformAdminServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MarketingplatformAdminServiceAsyncClient), +) +def test_marketingplatform_admin_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", + [MarketingplatformAdminServiceClient, MarketingplatformAdminServiceAsyncClient], +) +@mock.patch.object( + MarketingplatformAdminServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceAsyncClient), +) +def test_marketingplatform_admin_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + ), + ], +) +def test_marketingplatform_admin_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + None, + ), + ], +) +def test_marketingplatform_admin_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_marketingplatform_admin_service_client_client_options_from_dict(): + with mock.patch( + "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.transports.MarketingplatformAdminServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = MarketingplatformAdminServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_marketingplatform_admin_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "marketingplatformadmin.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + scopes=None, + default_host="marketingplatformadmin.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.GetOrganizationRequest, + dict, + ], +) +def test_get_organization(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Organization( + name="name_value", + display_name="display_name_value", + ) + response = client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.GetOrganizationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Organization) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_organization_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_organization() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.GetOrganizationRequest() + + +def test_get_organization_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.GetOrganizationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_organization(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.GetOrganizationRequest( + name="name_value", + ) + + +def test_get_organization_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_organization in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_organization + ] = mock_rpc + request = {} + client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_organization(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_organization_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Organization( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.get_organization() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.GetOrganizationRequest() + + +@pytest.mark.asyncio +async def test_get_organization_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_organization + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_organization + ] = mock_rpc + + request = {} + await client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_organization(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_organization_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.GetOrganizationRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Organization( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.GetOrganizationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Organization) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_get_organization_async_from_dict(): + await test_get_organization_async(request_type=dict) + + +def test_get_organization_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = marketingplatform_admin.GetOrganizationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + call.return_value = resources.Organization() + client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_organization_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.GetOrganizationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Organization() + ) + await client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_organization_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Organization() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_organization( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_organization_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_organization( + marketingplatform_admin.GetOrganizationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_organization_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Organization() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Organization() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_organization( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_organization_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_organization( + marketingplatform_admin.GetOrganizationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.ListAnalyticsAccountLinksRequest, + dict, + ], +) +def test_list_analytics_account_links(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse( + next_page_token="next_page_token_value", + ) + response = client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAnalyticsAccountLinksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_analytics_account_links_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_analytics_account_links() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.ListAnalyticsAccountLinksRequest() + + +def test_list_analytics_account_links_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_analytics_account_links(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.ListAnalyticsAccountLinksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_analytics_account_links_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_analytics_account_links + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_analytics_account_links + ] = mock_rpc + request = {} + client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_analytics_account_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_analytics_account_links() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.ListAnalyticsAccountLinksRequest() + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_analytics_account_links + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_analytics_account_links + ] = mock_rpc + + request = {} + await client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_analytics_account_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.ListAnalyticsAccountLinksRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAnalyticsAccountLinksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async_from_dict(): + await test_list_analytics_account_links_async(request_type=dict) + + +def test_list_analytics_account_links_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListAnalyticsAccountLinksResponse() + ) + await client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_analytics_account_links_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_analytics_account_links( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_analytics_account_links_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_analytics_account_links( + marketingplatform_admin.ListAnalyticsAccountLinksRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListAnalyticsAccountLinksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_analytics_account_links( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_analytics_account_links( + marketingplatform_admin.ListAnalyticsAccountLinksRequest(), + parent="parent_value", + ) + + +def test_list_analytics_account_links_pager(transport_name: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_analytics_account_links( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.AnalyticsAccountLink) for i in results) + + +def test_list_analytics_account_links_pages(transport_name: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, + ) + pages = list(client.list_analytics_account_links(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async_pager(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_analytics_account_links( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.AnalyticsAccountLink) for i in responses) + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async_pages(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_analytics_account_links(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + dict, + ], +) +def test_create_analytics_account_link(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.AnalyticsAccountLink( + name="name_value", + analytics_account="analytics_account_value", + display_name="display_name_value", + link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED, + ) + response = client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.AnalyticsAccountLink) + assert response.name == "name_value" + assert response.analytics_account == "analytics_account_value" + assert response.display_name == "display_name_value" + assert ( + response.link_verification_state + == resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED + ) + + +def test_create_analytics_account_link_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_analytics_account_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + + +def test_create_analytics_account_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
    client = MarketingplatformAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Populate all string fields in the request which are not UUID4
    # since we want to check that UUID4 are populated automatically
    # if they meet the requirements of AIP 4235.
    request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest(
        parent="parent_value",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_analytics_account_link), "__call__"
    ) as call:
        call.return_value.name = (
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
        client.create_analytics_account_link(request=request)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == marketingplatform_admin.CreateAnalyticsAccountLinkRequest(
            parent="parent_value",
        )


def test_create_analytics_account_link_use_cached_wrapped_rpc():
    """The wrapped RPC is created once at client init and reused on later calls."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = MarketingplatformAdminServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert (
            client._transport.create_analytics_account_link
            in client._transport._wrapped_methods
        )

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = (
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
        client._transport._wrapped_methods[
            client._transport.create_analytics_account_link
        ] = mock_rpc
        request = {}
        client.create_analytics_account_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.create_analytics_account_link(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


@pytest.mark.asyncio
async def test_create_analytics_account_link_empty_call_async():
    """Async: a bare call still sends a default CreateAnalyticsAccountLinkRequest."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = MarketingplatformAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc_asyncio",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_analytics_account_link), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.AnalyticsAccountLink(
                name="name_value",
                analytics_account="analytics_account_value",
                display_name="display_name_value",
                link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED,
            )
        )
        response = await client.create_analytics_account_link()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == marketingplatform_admin.CreateAnalyticsAccountLinkRequest()


@pytest.mark.asyncio
async def test_create_analytics_account_link_async_use_cached_wrapped_rpc(
    transport: str = "grpc_asyncio",
):
    """Async: the wrapped RPC is cached at client init and reused."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
        client = MarketingplatformAdminServiceAsyncClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert (
            client._client._transport.create_analytics_account_link
            in client._client._transport._wrapped_methods
        )

        # Replace cached wrapped function with mock
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[
            client._client._transport.create_analytics_account_link
        ] = mock_rpc

        request = {}
        await client.create_analytics_account_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        await client.create_analytics_account_link(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


@pytest.mark.asyncio
async def test_create_analytics_account_link_async(
    transport: str = "grpc_asyncio",
    request_type=marketingplatform_admin.CreateAnalyticsAccountLinkRequest,
):
    """Async: CreateAnalyticsAccountLink forwards the request and returns the link."""
    client = MarketingplatformAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_analytics_account_link), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.AnalyticsAccountLink(
                name="name_value",
                analytics_account="analytics_account_value",
                display_name="display_name_value",
                link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED,
            )
        )
        response = await client.create_analytics_account_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.AnalyticsAccountLink)
    assert response.name == "name_value"
    assert response.analytics_account == "analytics_account_value"
    assert response.display_name == "display_name_value"
    assert (
        response.link_verification_state
        == resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED
    )


@pytest.mark.asyncio
async def test_create_analytics_account_link_async_from_dict():
    """Async variant accepts a plain dict as the request."""
    await test_create_analytics_account_link_async(request_type=dict)


def test_create_analytics_account_link_field_headers():
    """The parent field is propagated as an x-goog-request-params header."""
    client = MarketingplatformAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest()

    request.parent = "parent_value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_analytics_account_link), "__call__"
    ) as call:
        call.return_value = resources.AnalyticsAccountLink()
        client.create_analytics_account_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AnalyticsAccountLink() + ) + await client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_analytics_account_link_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.AnalyticsAccountLink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_analytics_account_link( + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].analytics_account_link + mock_val = resources.AnalyticsAccountLink(name="name_value") + assert arg == mock_val + + +def test_create_analytics_account_link_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_analytics_account_link( + marketingplatform_admin.CreateAnalyticsAccountLinkRequest(), + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.AnalyticsAccountLink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AnalyticsAccountLink() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_analytics_account_link( + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].analytics_account_link + mock_val = resources.AnalyticsAccountLink(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_analytics_account_link( + marketingplatform_admin.CreateAnalyticsAccountLinkRequest(), + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + dict, + ], +) +def test_delete_analytics_account_link(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
    assert response is None


def test_delete_analytics_account_link_empty_call():
    """A bare call still sends a default DeleteAnalyticsAccountLinkRequest."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = MarketingplatformAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_analytics_account_link), "__call__"
    ) as call:
        call.return_value.name = (
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
        client.delete_analytics_account_link()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == marketingplatform_admin.DeleteAnalyticsAccountLinkRequest()


def test_delete_analytics_account_link_non_empty_request_with_auto_populated_field():
    """Non-UUID4 request fields survive the call unchanged (AIP-4235 check)."""
    # This test is a coverage failsafe to make sure that UUID4 fields are
    # automatically populated, according to AIP-4235, with non-empty requests.
    client = MarketingplatformAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Populate all string fields in the request which are not UUID4
    # since we want to check that UUID4 are populated automatically
    # if they meet the requirements of AIP 4235.
    request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(
        name="name_value",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_analytics_account_link), "__call__"
    ) as call:
        call.return_value.name = (
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
        client.delete_analytics_account_link(request=request)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(
            name="name_value",
        )


def test_delete_analytics_account_link_use_cached_wrapped_rpc():
    """The wrapped delete RPC is cached at client init and reused."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = MarketingplatformAdminServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert (
            client._transport.delete_analytics_account_link
            in client._transport._wrapped_methods
        )

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = (
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
        client._transport._wrapped_methods[
            client._transport.delete_analytics_account_link
        ] = mock_rpc
        request = {}
        client.delete_analytics_account_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.delete_analytics_account_link(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


@pytest.mark.asyncio
async def test_delete_analytics_account_link_empty_call_async():
    """Async: a bare call still sends a default DeleteAnalyticsAccountLinkRequest."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = MarketingplatformAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc_asyncio",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_analytics_account_link), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_analytics_account_link()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == marketingplatform_admin.DeleteAnalyticsAccountLinkRequest()


@pytest.mark.asyncio
async def test_delete_analytics_account_link_async_use_cached_wrapped_rpc(
    transport: str = "grpc_asyncio",
):
    """Async: the wrapped delete RPC is cached at client init and reused."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
        client = MarketingplatformAdminServiceAsyncClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert (
            client._client._transport.delete_analytics_account_link
            in client._client._transport._wrapped_methods
        )

        # Replace cached wrapped function with mock
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[
            client._client._transport.delete_analytics_account_link
        ] = mock_rpc

        request = {}
        await client.delete_analytics_account_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        await client.delete_analytics_account_link(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


@pytest.mark.asyncio
async def test_delete_analytics_account_link_async(
    transport: str = "grpc_asyncio",
    request_type=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest,
):
    """Async: DeleteAnalyticsAccountLink forwards the request and returns None."""
    client = MarketingplatformAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_analytics_account_link), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_analytics_account_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert response is None


@pytest.mark.asyncio
async def test_delete_analytics_account_link_async_from_dict():
    """Async delete variant accepts a plain dict as the request."""
    await test_delete_analytics_account_link_async(request_type=dict)


def test_delete_analytics_account_link_field_headers():
    """The name field is propagated as an x-goog-request-params header."""
    client = MarketingplatformAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
+ request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + call.return_value = None + client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_analytics_account_link_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_analytics_account_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_analytics_account_link_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_analytics_account_link( + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_analytics_account_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_analytics_account_link( + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.SetPropertyServiceLevelRequest, + dict, + ], +) +def test_set_property_service_level(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + response = client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.SetPropertyServiceLevelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
    assert isinstance(response, marketingplatform_admin.SetPropertyServiceLevelResponse)


def test_set_property_service_level_empty_call():
    """A bare call still sends a default SetPropertyServiceLevelRequest."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = MarketingplatformAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.set_property_service_level), "__call__"
    ) as call:
        call.return_value.name = (
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
        client.set_property_service_level()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == marketingplatform_admin.SetPropertyServiceLevelRequest()


def test_set_property_service_level_non_empty_request_with_auto_populated_field():
    """Non-UUID4 request fields survive the call unchanged (AIP-4235 check)."""
    # This test is a coverage failsafe to make sure that UUID4 fields are
    # automatically populated, according to AIP-4235, with non-empty requests.
    client = MarketingplatformAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Populate all string fields in the request which are not UUID4
    # since we want to check that UUID4 are populated automatically
    # if they meet the requirements of AIP 4235.
    request = marketingplatform_admin.SetPropertyServiceLevelRequest(
        analytics_account_link="analytics_account_link_value",
        analytics_property="analytics_property_value",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.set_property_service_level), "__call__"
    ) as call:
        call.return_value.name = (
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
        client.set_property_service_level(request=request)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == marketingplatform_admin.SetPropertyServiceLevelRequest(
            analytics_account_link="analytics_account_link_value",
            analytics_property="analytics_property_value",
        )


def test_set_property_service_level_use_cached_wrapped_rpc():
    """The wrapped RPC is cached at client init and reused on later calls."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = MarketingplatformAdminServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert (
            client._transport.set_property_service_level
            in client._transport._wrapped_methods
        )

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = (
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
        client._transport._wrapped_methods[
            client._transport.set_property_service_level
        ] = mock_rpc
        request = {}
        client.set_property_service_level(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.set_property_service_level(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


@pytest.mark.asyncio
async def test_set_property_service_level_empty_call_async():
    """Async: a bare call still sends a default SetPropertyServiceLevelRequest."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = MarketingplatformAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc_asyncio",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.set_property_service_level), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            marketingplatform_admin.SetPropertyServiceLevelResponse()
        )
        response = await client.set_property_service_level()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == marketingplatform_admin.SetPropertyServiceLevelRequest()


@pytest.mark.asyncio
async def test_set_property_service_level_async_use_cached_wrapped_rpc(
    transport: str = "grpc_asyncio",
):
    """Async: the wrapped RPC is cached at client init and reused."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
        client = MarketingplatformAdminServiceAsyncClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert (
            client._client._transport.set_property_service_level
            in client._client._transport._wrapped_methods
        )

        # Replace cached wrapped function with mock
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[
            client._client._transport.set_property_service_level
        ] = mock_rpc

        request = {}
        await client.set_property_service_level(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        await client.set_property_service_level(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


@pytest.mark.asyncio
async def test_set_property_service_level_async(
    transport: str = "grpc_asyncio",
    request_type=marketingplatform_admin.SetPropertyServiceLevelRequest,
):
    """Async: SetPropertyServiceLevel forwards the request and returns the response."""
    client = MarketingplatformAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.set_property_service_level), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            marketingplatform_admin.SetPropertyServiceLevelResponse()
        )
        response = await client.set_property_service_level(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        request = marketingplatform_admin.SetPropertyServiceLevelRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, marketingplatform_admin.SetPropertyServiceLevelResponse)


@pytest.mark.asyncio
async def test_set_property_service_level_async_from_dict():
    """Async variant accepts a plain dict as the request."""
    await test_set_property_service_level_async(request_type=dict)


def test_set_property_service_level_field_headers():
    """analytics_account_link is propagated as an x-goog-request-params header."""
    client = MarketingplatformAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = marketingplatform_admin.SetPropertyServiceLevelRequest()

    request.analytics_account_link = "analytics_account_link_value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.set_property_service_level), "__call__"
    ) as call:
        call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse()
        client.set_property_service_level(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "analytics_account_link=analytics_account_link_value",
    ) in kw["metadata"]


@pytest.mark.asyncio
async def test_set_property_service_level_field_headers_async():
    """Async: analytics_account_link is propagated as a request-params header."""
    client = MarketingplatformAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = marketingplatform_admin.SetPropertyServiceLevelRequest()

    request.analytics_account_link = "analytics_account_link_value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.set_property_service_level), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            marketingplatform_admin.SetPropertyServiceLevelResponse()
        )
        await client.set_property_service_level(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "analytics_account_link=analytics_account_link_value",
    ) in kw["metadata"]


def test_set_property_service_level_flattened():
    """The flattened analytics_account_link kwarg populates the request."""
    client = MarketingplatformAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.set_property_service_level), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.set_property_service_level(
            analytics_account_link="analytics_account_link_value",
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].analytics_account_link
        mock_val = "analytics_account_link_value"
        assert arg == mock_val


def test_set_property_service_level_flattened_error():
    """Passing both a request object and flattened fields raises ValueError."""
    client = MarketingplatformAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.set_property_service_level(
            marketingplatform_admin.SetPropertyServiceLevelRequest(),
            analytics_account_link="analytics_account_link_value",
        )


@pytest.mark.asyncio
async def test_set_property_service_level_flattened_async():
    """Async: the flattened analytics_account_link kwarg populates the request."""
    client = MarketingplatformAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.set_property_service_level), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
+ call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_property_service_level( + analytics_account_link="analytics_account_link_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].analytics_account_link + mock_val = "analytics_account_link_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_property_service_level_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_property_service_level( + marketingplatform_admin.SetPropertyServiceLevelRequest(), + analytics_account_link="analytics_account_link_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.GetOrganizationRequest, + dict, + ], +) +def test_get_organization_rest(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.Organization( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Organization.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_organization(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Organization) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_organization_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_organization in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_organization + ] = mock_rpc + + request = {} + client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_organization(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_organization_rest_required_fields( + request_type=marketingplatform_admin.GetOrganizationRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_organization._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_organization._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Organization() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Organization.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_organization(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_organization_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_organization._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_organization_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, "post_get_organization" + ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, 
"pre_get_organization" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = marketingplatform_admin.GetOrganizationRequest.pb( + marketingplatform_admin.GetOrganizationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Organization.to_json( + resources.Organization() + ) + + request = marketingplatform_admin.GetOrganizationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Organization() + + client.get_organization( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_organization_rest_bad_request( + transport: str = "rest", request_type=marketingplatform_admin.GetOrganizationRequest +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_organization(request) + + +def test_get_organization_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Organization() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Organization.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_organization(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=organizations/*}" % client.transport._host, args[1] + ) + + +def test_get_organization_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_organization( + marketingplatform_admin.GetOrganizationRequest(), + name="name_value", + ) + + +def test_get_organization_rest_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.ListAnalyticsAccountLinksRequest, + dict, + ], +) +def test_list_analytics_account_links_rest(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_analytics_account_links(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAnalyticsAccountLinksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_analytics_account_links_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_analytics_account_links + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_analytics_account_links + ] = mock_rpc + + request = {} + client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_analytics_account_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_analytics_account_links_rest_required_fields( + request_type=marketingplatform_admin.ListAnalyticsAccountLinksRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_analytics_account_links._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_analytics_account_links._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_analytics_account_links(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_analytics_account_links_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_analytics_account_links._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_analytics_account_links_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = 
MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_list_analytics_account_links", + ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "pre_list_analytics_account_links", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = marketingplatform_admin.ListAnalyticsAccountLinksRequest.pb( + marketingplatform_admin.ListAnalyticsAccountLinksRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse.to_json( + marketingplatform_admin.ListAnalyticsAccountLinksResponse() + ) + ) + + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + + client.list_analytics_account_links( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_analytics_account_links_rest_bad_request( + transport: str = "rest", + request_type=marketingplatform_admin.ListAnalyticsAccountLinksRequest, +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and 
fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_analytics_account_links(request) + + +def test_list_analytics_account_links_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_analytics_account_links(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=organizations/*}/analyticsAccountLinks" + % client.transport._host, + args[1], + ) + + +def test_list_analytics_account_links_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_analytics_account_links( + marketingplatform_admin.ListAnalyticsAccountLinksRequest(), + parent="parent_value", + ) + + +def test_list_analytics_account_links_rest_pager(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + marketingplatform_admin.ListAnalyticsAccountLinksResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1"} + + pager = client.list_analytics_account_links(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.AnalyticsAccountLink) for i in results) + + pages = list(client.list_analytics_account_links(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + dict, + ], +) +def test_create_analytics_account_link_rest(request_type): + client = MarketingplatformAdminServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request_init["analytics_account_link"] = { + "name": "name_value", + "analytics_account": "analytics_account_value", + "display_name": "display_name_value", + "link_verification_state": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = marketingplatform_admin.CreateAnalyticsAccountLinkRequest.meta.fields[ + "analytics_account_link" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "analytics_account_link" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["analytics_account_link"][field])): + del 
request_init["analytics_account_link"][field][i][subfield] + else: + del request_init["analytics_account_link"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.AnalyticsAccountLink( + name="name_value", + analytics_account="analytics_account_value", + display_name="display_name_value", + link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.AnalyticsAccountLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_analytics_account_link(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.AnalyticsAccountLink) + assert response.name == "name_value" + assert response.analytics_account == "analytics_account_value" + assert response.display_name == "display_name_value" + assert ( + response.link_verification_state + == resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED + ) + + +def test_create_analytics_account_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_analytics_account_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_analytics_account_link + ] = mock_rpc + + request = {} + client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_analytics_account_link_rest_required_fields( + request_type=marketingplatform_admin.CreateAnalyticsAccountLinkRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_analytics_account_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_analytics_account_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.AnalyticsAccountLink() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.AnalyticsAccountLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_analytics_account_link(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_analytics_account_link_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_analytics_account_link._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "analyticsAccountLink", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_analytics_account_link_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = 
MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_create_analytics_account_link", + ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "pre_create_analytics_account_link", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = marketingplatform_admin.CreateAnalyticsAccountLinkRequest.pb( + marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.AnalyticsAccountLink.to_json( + resources.AnalyticsAccountLink() + ) + + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.AnalyticsAccountLink() + + client.create_analytics_account_link( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_analytics_account_link_rest_bad_request( + transport: str = "rest", + request_type=marketingplatform_admin.CreateAnalyticsAccountLinkRequest, +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_analytics_account_link(request) + + +def test_create_analytics_account_link_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.AnalyticsAccountLink() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.AnalyticsAccountLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_analytics_account_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=organizations/*}/analyticsAccountLinks" + % client.transport._host, + args[1], + ) + + +def test_create_analytics_account_link_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_analytics_account_link( + marketingplatform_admin.CreateAnalyticsAccountLinkRequest(), + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + +def test_create_analytics_account_link_rest_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + dict, + ], +) +def test_delete_analytics_account_link_rest(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/analyticsAccountLinks/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_analytics_account_link(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_analytics_account_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_analytics_account_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_analytics_account_link + ] = mock_rpc + + request = {} + client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_analytics_account_link_rest_required_fields( + request_type=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_analytics_account_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_analytics_account_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_analytics_account_link(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_analytics_account_link_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_analytics_account_link._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_analytics_account_link_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "pre_delete_analytics_account_link", + ) as pre: + pre.assert_not_called() + pb_message = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest.pb( + 
marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_analytics_account_link( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_analytics_account_link_rest_bad_request( + transport: str = "rest", + request_type=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/analyticsAccountLinks/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_analytics_account_link(request) + + +def test_delete_analytics_account_link_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/analyticsAccountLinks/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_analytics_account_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=organizations/*/analyticsAccountLinks/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_analytics_account_link_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_analytics_account_link( + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(), + name="name_value", + ) + + +def test_delete_analytics_account_link_rest_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.SetPropertyServiceLevelRequest, + dict, + ], +) +def test_set_property_service_level_rest(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "analytics_account_link": "organizations/sample1/analyticsAccountLinks/sample2" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_property_service_level(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, marketingplatform_admin.SetPropertyServiceLevelResponse) + + +def test_set_property_service_level_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_property_service_level + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_property_service_level + ] = mock_rpc + + request = {} + client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_property_service_level(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_property_service_level_rest_required_fields( + request_type=marketingplatform_admin.SetPropertyServiceLevelRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["analytics_account_link"] = "" + request_init["analytics_property"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_property_service_level._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["analyticsAccountLink"] = "analytics_account_link_value" + jsonified_request["analyticsProperty"] = "analytics_property_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_property_service_level._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "analyticsAccountLink" in jsonified_request + assert jsonified_request["analyticsAccountLink"] == "analytics_account_link_value" + assert "analyticsProperty" in jsonified_request + assert jsonified_request["analyticsProperty"] == "analytics_property_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_property_service_level(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_property_service_level_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_property_service_level._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "analyticsAccountLink", + "analyticsProperty", + "serviceLevel", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_property_service_level_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_set_property_service_level", + ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "pre_set_property_service_level", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = marketingplatform_admin.SetPropertyServiceLevelRequest.pb( + marketingplatform_admin.SetPropertyServiceLevelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + marketingplatform_admin.SetPropertyServiceLevelResponse.to_json( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + ) + + request = marketingplatform_admin.SetPropertyServiceLevelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + + client.set_property_service_level( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_property_service_level_rest_bad_request( + transport: str = "rest", + request_type=marketingplatform_admin.SetPropertyServiceLevelRequest, +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding 
+ request_init = { + "analytics_account_link": "organizations/sample1/analyticsAccountLinks/sample2" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_property_service_level(request) + + +def test_set_property_service_level_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "analytics_account_link": "organizations/sample1/analyticsAccountLinks/sample2" + } + + # get truthy value for each flattened field + mock_args = dict( + analytics_account_link="analytics_account_link_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_property_service_level(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{analytics_account_link=organizations/*/analyticsAccountLinks/*}:setPropertyServiceLevel" + % client.transport._host, + args[1], + ) + + +def test_set_property_service_level_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_property_service_level( + marketingplatform_admin.SetPropertyServiceLevelRequest(), + analytics_account_link="analytics_account_link_value", + ) + + +def test_set_property_service_level_rest_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MarketingplatformAdminServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + transports.MarketingplatformAdminServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = MarketingplatformAdminServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MarketingplatformAdminServiceGrpcTransport, + ) + + +def test_marketingplatform_admin_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MarketingplatformAdminServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_marketingplatform_admin_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.transports.MarketingplatformAdminServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.MarketingplatformAdminServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_organization", + "list_analytics_account_links", + "create_analytics_account_link", + "delete_analytics_account_link", + "set_property_service_level", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_marketingplatform_admin_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.transports.MarketingplatformAdminServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MarketingplatformAdminServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + quota_project_id="octopus", + ) + + +def 
test_marketingplatform_admin_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.transports.MarketingplatformAdminServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MarketingplatformAdminServiceTransport() + adc.assert_called_once() + + +def test_marketingplatform_admin_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MarketingplatformAdminServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ], +) +def test_marketingplatform_admin_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + transports.MarketingplatformAdminServiceRestTransport, + ], +) +def test_marketingplatform_admin_service_transport_auth_gdch_credentials( + transport_class, +): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MarketingplatformAdminServiceGrpcTransport, grpc_helpers), + ( + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +def test_marketingplatform_admin_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "marketingplatformadmin.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + scopes=["1", "2"], + default_host="marketingplatformadmin.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ], +) +def test_marketingplatform_admin_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_marketingplatform_admin_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.MarketingplatformAdminServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_marketingplatform_admin_service_host_no_port(transport_name): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="marketingplatformadmin.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "marketingplatformadmin.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://marketingplatformadmin.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_marketingplatform_admin_service_host_with_port(transport_name): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="marketingplatformadmin.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + 
"marketingplatformadmin.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://marketingplatformadmin.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_marketingplatform_admin_service_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MarketingplatformAdminServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = MarketingplatformAdminServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_organization._session + session2 = client2.transport.get_organization._session + assert session1 != session2 + session1 = client1.transport.list_analytics_account_links._session + session2 = client2.transport.list_analytics_account_links._session + assert session1 != session2 + session1 = client1.transport.create_analytics_account_link._session + session2 = client2.transport.create_analytics_account_link._session + assert session1 != session2 + session1 = client1.transport.delete_analytics_account_link._session + session2 = client2.transport.delete_analytics_account_link._session + assert session1 != session2 + session1 = client1.transport.set_property_service_level._session + session2 = client2.transport.set_property_service_level._session + assert session1 != session2 + + +def test_marketingplatform_admin_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.MarketingplatformAdminServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_marketingplatform_admin_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.MarketingplatformAdminServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ], +) +def test_marketingplatform_admin_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert 
bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ], +) +def test_marketingplatform_admin_service_transport_channel_mtls_with_adc( + transport_class, +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_account_path(): + account = "squid" + expected = "accounts/{account}".format( + account=account, + ) + actual = 
MarketingplatformAdminServiceClient.account_path(account) + assert expected == actual + + +def test_parse_account_path(): + expected = { + "account": "clam", + } + path = MarketingplatformAdminServiceClient.account_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_account_path(path) + assert expected == actual + + +def test_analytics_account_link_path(): + organization = "whelk" + analytics_account_link = "octopus" + expected = "organizations/{organization}/analyticsAccountLinks/{analytics_account_link}".format( + organization=organization, + analytics_account_link=analytics_account_link, + ) + actual = MarketingplatformAdminServiceClient.analytics_account_link_path( + organization, analytics_account_link + ) + assert expected == actual + + +def test_parse_analytics_account_link_path(): + expected = { + "organization": "oyster", + "analytics_account_link": "nudibranch", + } + path = MarketingplatformAdminServiceClient.analytics_account_link_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_analytics_account_link_path(path) + assert expected == actual + + +def test_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = MarketingplatformAdminServiceClient.organization_path(organization) + assert expected == actual + + +def test_parse_organization_path(): + expected = { + "organization": "mussel", + } + path = MarketingplatformAdminServiceClient.organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MarketingplatformAdminServiceClient.parse_organization_path(path) + assert expected == actual + + +def test_property_path(): + property = "winkle" + expected = "properties/{property}".format( + property=property, + ) + actual = MarketingplatformAdminServiceClient.property_path(property) + assert expected == actual + + +def test_parse_property_path(): + expected = { + "property": "nautilus", + } + path = MarketingplatformAdminServiceClient.property_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_property_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = MarketingplatformAdminServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = MarketingplatformAdminServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = MarketingplatformAdminServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = MarketingplatformAdminServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MarketingplatformAdminServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = MarketingplatformAdminServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = MarketingplatformAdminServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = MarketingplatformAdminServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = MarketingplatformAdminServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = MarketingplatformAdminServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = MarketingplatformAdminServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MarketingplatformAdminServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.MarketingplatformAdminServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.MarketingplatformAdminServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = MarketingplatformAdminServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From c365adb60deda36cb8227fa2eaccc522bdd408fd Mon Sep 17 00:00:00 2001 From: yoshi-code-bot <70984784+yoshi-code-bot@users.noreply.github.com> Date: Thu, 5 Sep 2024 14:36:48 -0700 Subject: [PATCH 04/59] chore: Update release-please config files (#13061) Update release-please config files --- .release-please-manifest.json | 1 + release-please-config.json | 15 +++++++++++++++ 2 files changed, 16 insertions(+) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 4fe1785f9ff1..146cef793774 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,5 +1,6 @@ { "packages/google-ads-admanager": "0.1.2", + "packages/google-ads-marketingplatform-admin": "0.0.0", 
"packages/google-ai-generativelanguage": "0.6.9", "packages/google-analytics-admin": "0.23.0", "packages/google-analytics-data": "0.18.11", diff --git a/release-please-config.json b/release-please-config.json index ea9a89e5e9fc..b2f26908453f 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -15,6 +15,21 @@ ], "release-type": "python" }, + "packages/google-ads-marketingplatform-admin": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-ads-marketingplatform-admin", + "extra-files": [ + "google/ads/marketingplatform_admin/gapic_version.py", + "google/ads/marketingplatform_admin_v1alpha/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json", + "type": "json" + } + ], + "release-type": "python" + }, "packages/google-ai-generativelanguage": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, From b76759921d8bedf8edfbf2a97fb6a614fbc5c1ce Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 5 Sep 2024 17:45:04 -0400 Subject: [PATCH 05/59] chore: release main (#13062) :robot: I have created a release *beep* *boop* ---
google-ads-marketingplatform-admin: 0.1.0 ## 0.1.0 (2024-09-05) ### Features * add initial files for google.marketingplatform.admin.v1alpha ([#13060](https://github.com/googleapis/google-cloud-python/issues/13060)) ([2bbab3b](https://github.com/googleapis/google-cloud-python/commit/2bbab3bea1548fdb6200856ffe4a6fe7d6bf8487))
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- .../google-ads-marketingplatform-admin/CHANGELOG.md | 11 ++++++++++- .../ads/marketingplatform_admin/gapic_version.py | 2 +- .../marketingplatform_admin_v1alpha/gapic_version.py | 2 +- 4 files changed, 13 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 146cef793774..477ae9480c55 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,6 +1,6 @@ { "packages/google-ads-admanager": "0.1.2", - "packages/google-ads-marketingplatform-admin": "0.0.0", + "packages/google-ads-marketingplatform-admin": "0.1.0", "packages/google-ai-generativelanguage": "0.6.9", "packages/google-analytics-admin": "0.23.0", "packages/google-analytics-data": "0.18.11", diff --git a/packages/google-ads-marketingplatform-admin/CHANGELOG.md b/packages/google-ads-marketingplatform-admin/CHANGELOG.md index 5ddad421e08f..f8676c0292af 100644 --- a/packages/google-ads-marketingplatform-admin/CHANGELOG.md +++ b/packages/google-ads-marketingplatform-admin/CHANGELOG.md @@ -1 +1,10 @@ -# Changelog \ No newline at end of file +# Changelog + +## 0.1.0 (2024-09-05) + + +### Features + +* add initial files for google.marketingplatform.admin.v1alpha ([#13060](https://github.com/googleapis/google-cloud-python/issues/13060)) ([2bbab3b](https://github.com/googleapis/google-cloud-python/commit/2bbab3bea1548fdb6200856ffe4a6fe7d6bf8487)) + +## Changelog diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py index 558c8aab67c5..33d37a7b677b 100644 --- 
a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py index 558c8aab67c5..33d37a7b677b 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.0" # {x-release-please-version} From 0ee300a0497968aa2c85969924b37f95f67675f0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 5 Sep 2024 19:42:21 -0400 Subject: [PATCH 06/59] feat: [google-apps-chat] Add CHAT_SPACE link type support for GA launch (#13064) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 671436186 Source-Link: https://github.com/googleapis/googleapis/commit/f21743b4fe99a37e86522823454a67203113b43a Source-Link: https://github.com/googleapis/googleapis-gen/commit/01202948aeacf502f63d3d01995521589e4c6db4 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFwcHMtY2hhdC8uT3dsQm90LnlhbWwiLCJoIjoiMDEyMDI5NDhhZWFjZjUwMmY2M2QzZDAxOTk1NTIxNTg5ZTRjNmRiNCJ9 --------- Co-authored-by: Owl Bot --- packages/google-apps-chat/chat-v1-py.tar.gz | Bin 0 -> 131072 bytes .../google/apps/chat/__init__.py | 2 + .../google/apps/chat_v1/__init__.py | 2 + .../google/apps/chat_v1/types/__init__.py | 2 + .../google/apps/chat_v1/types/annotation.py | 52 ++++++++++++++++++ .../unit/gapic/chat_v1/test_chat_service.py | 10 ++++ 6 files changed, 68 insertions(+) diff --git a/packages/google-apps-chat/chat-v1-py.tar.gz b/packages/google-apps-chat/chat-v1-py.tar.gz index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..496d03ec6ccfe68f0cfb0a0dc0d0d826791ef358 100644 GIT binary patch literal 131072 zcmV)MK)AmjiwFP!000001MEF(Q{yjg> z?7I8m@~&rk!Dib)0UsY8&Xs?weNdABLA$m81KHm!8MDiGE`LUym~rokHod^{W0#U) z7!G~9XGam=L+v2%&1h0t4re{J6zI3^(ypvga`w+1=0$>UJ`Sw!{|_Jl?EL>W$_)7r zyq7q#v`Jf!bn+ZBmxE=$wvEO9zjb`nJ}%0?z29!{{Qp+UyT6dVn6tgU7wpmCi9{1I z3WI~nJJNjLBu?mh!SIyCV$l2%pTP6;Fq$xLI1&WJbVz=ZVSrp+ombw$+bhqZ0jDmB z1D7%)MwFaIwgdn48-x5zA&`UtX`B0`jsR8ts@nJ;z%U78GPWlq2nC5b1r#190}tAu zFAj|a@dBW09QmFdIFvklVkDcM(iP@ z@J@tC?=$lRFw!7A9EfL|QNZGQTrjU6ivqI}>Y&eg0AQCLkm}hT>E2bzKhEyDcLw12 zrQ7@8o6kM+%h~Pi*>$gbc}H$;$@$IoMYq?zxrVzB zksh*r*^JZVKIF-j@AVxH0Gz~}I}jEHeKANcj(pmvR4OhV0AeOQB)48cMF7UQAVxt?Q#AMY zfc3#3{V?<&3<>24%4`v`(-fBKtgi8{{S3xd>ix#BL#A+$LxRuWn;?fAUpH? 
zu9th%P$ZuBZm+CQXSZh`?@l$ZK+7Mnhc#%9O=T04an1A9!Nm4=Z6W)v7^jp?`#ern`J z5)ec~15Hr+VopWQ!AhoTsi)Yv5dgXvSMd&=2<5`^+#Hz2Xyi|KbKH58il`( zge|t3KAu@AV}rUXQ5Shf)0>Bq>a1yIX7te}=Qw~+J{z@kq}2vT5DgTRM~g($VAEeH0&;B#H&B{hmr z^|<#NO>ac_NTp&zTm}z>S9}ycTSFGcP(8Wzf+uiLWnI#(NtV5;DM&{tv-*>?9i<*K z+etL*H4RN6%gLyWFQaZg=GpiXOzY9ZLB5csFSFrVUaCb5apcZ$bOra8!r1(AUhPct zb3U$EuLi*Zq$E>?U^pAmjE*dAe5Nf-JzRpx4*{~|3<_eUMT7Z61y4*Kd}=>Y3qxO> zQ-1)_9P(l>u>Dga;~%^*h-%x-9?2|0$H(i?rCI2P=$GAKffxaiEWv6H;=nP1RAxTe zeoTv?I7qXAaC?mD0@7oagXKNodq`{PY`T6Xbe4I^SO{VCpL8Ox8bw;(kxP|6VqR_& zh|8nUB`OQUB>z(v6G{<=kZcD}p)b=`$bS2fLdFbozdSOXSZLlBoI8g8`t95dzqi>J$a&WZKN7tV!cL+%^N_< zJgGk>fJ&yM(;-!D!Bv^&Jz|=t8l95JFSPaq9Pm-wCqVfNvOF0s8N`KvD)2F+i7+04 zq0nklP^}rn5zMz{^tXT{>%3j+3Niy|m#Uac`;3y_BKq`Eqf%nGq(M`F|L#x0AnSQ; zZ$MTm-%PzY&6GXY6_dUQOoPz`&)BdoiS5{m=v2cx`1%@jK)*)>jYfg=QbA$aieT*a zZ(dYSi8rRW(8%>P@z?+fy-esTw55rFSD5eC@23b!US0YW-B^7lr3iF~V?KHZHXjV@ z)!OfW)T$RzBPFtTxRFlFF4bUvn#Bd*wYp*mPj zpV9zua-6To{DMD0PX>efM4hKlp{S8rQt=XD=T*-rcD(V>Fv^a-sAQ?lH)fea(NImC z^1%7I0P?4zps}&eeNLYqfQFpz=aI|tFE|GaorT8Z1(-pfCgW}M6Z}_{18?WOdawS8 zY^>LrH6#QUG}009MJ(q_gqm~kf(VoW&?in9TGLMqBoPCFoB`5N#{tyfm?g4Lkno3r zD`m$O{`a~c7&mNS9=LinE(qB~1L*E#5C);+NK8tA$v8ZkVDr`x)CC5rDHrp+sE6Fd zd-K=O3+gyq2&Csc2I`D@Y8kRK92EyDJ=^|cT@hS_9w2~*9FPP(2gV6-Vf8@+2IMkc zn9)R$nVTILATT1xNYA2&oHGj}@Rt${lg3&Dr(6cb2*a5J{T-MXU+Rr~nEr&5H`+8I zK%>!l0jEslMI<9qE`SO~sdjL3^2Zv7ZtT-KG?VpQD>=X+xcTIPS=Mv9RUqSBj$|br zvcE`2DnWD%u>+QqO=bo3S#s1YH)qM}okTCB5b=>>5HP^Eh4+L>c_OMQiX{qDrHj0Gei5=-Vs{b*pAU)(kziYO1=F8&k1ZUSp_=F;Ym4V3}>W zRDDkXv+RHweSj<^j*?EHgT~2SuDXSHj_&~$oc?1rn}Bim@@b%|2gVA*yuRrXoPxv- zKqQxe1fM%$w934U}ae0@s6KmT*Qi>0zCkLsJS<u~LM+ry40H!0gmhu* z$8trq8l#{-#zAucP8AGT&I#nYkc~I6Cn{3lvwB14D2Z)%iUC+A_vzb4lC~^@&R%QW z13hZdPfd^Gzj|)u6)~@zUJt5eq1Ap*QT$PZaTxR*@k)0lzq@cfj83amiwPoftHUSU zF2vo{ag6dU&)3wl5Knvh!Li8Cb_tgIs7G1{+G(V@;VO{Zy1CO$A_2g{R0F39v{Qs zqy3gaemUAdsDMt$b%&jboQ&3Y!?Xb5{{3FWLJ{u0f3NO_cH}wf9pn^GKqF5d zh)I;*Sy8`@OBiJncLF+UL(c9)$Uz>^7mOg-Vcm`ygfFV5WknMQ)yc9jgqTB5;D1~X 
z>EH$AxPw3C+Q^*$hY=8V^UtyV@8Dp!{%;#)hWzDtjDb59ym?@@+NOB-^;WNKO}2WcG5w5aUK1c!Hqja8u?4xO59f>UtUnhKso5hz5>F zW!wNC3h3lSNgD0VidstE1V7z$AO=*V|87Qp%++N-9%L{PZ#v+W|9r0WKe8;f|Ns7K z^M8%~9~~X+?0*|&XaC=KSswrQzfJt#+5c9`&i=pevef=h-ah^xw-2Y|e|u;D+b9d{ zJmzXr%NbGUQGK%Y!)MA!Ka2yHgSo4(u1tOvlqIEUW zh7k(CHX8W3AZe0}09RG}2*(d&w>bzy^0y9YnXN-}-yna*Sx9r=rB78udN-PDneb7z zDyWR5(@{9eZVI0t9Gb24c`QbqP8(rcITwat?Y4Q4y0T8^z-;fE?J1Ym>Fk>TyH(G& zJpQ-7P5eLH+5a}mF8+VtWvTuD_>IQ@yZFDAva|p1yX3$BJNUKF9#ZCPwvA=>e{fv> z{(o@1znlNxO1Woo@K6C`4IEtmii^D}m3v0a%2^N7FJO?<$&rg~R4RZ98)8|?n{ zn)M&;W8D8|xBlbp%ar_cA$(J9Eb;#>aQ7woAD-;ie{84hHhS8v{(O^-p1!qW@5)ML z%Kzhp&W4kK_2d7+(aFg!{%@nqmj7mxfc4}5(aHYq`_ER&F8=T0|1SQ2zh$=nS3jBA zR2%EZf4FJy;{P_vBKdDB3s^t?ADM9;06PC(B1 zByc|B1M+Tqxsax(D`Dk+$EP#l&iQDDcVzUQ_1bZdPO^iaUEO>=BBKC*q2#h~@3D>uZ`)h=QtWqljtw!%H=CpgVWxg(CncB}!pT)ozf-0R- zR)R}sfzR2ag*ElExYlAIP&5lSSxdRO0-$B187u_HZP6@2!9{?~7edMdIXEePb199lNA^{CaC`nK5v*ES zZe@yB3lJ2zr_L{so7u-J*9jo1N|Rl+Lb?I|iWP82X8Iyx+qb}{uiTD2sR6r_oa?e; zJ$dGW71F>(6L^DK_1Gm z9(3B+I(3tiHr52!ZLL|G`!*keZyJv=A6c;uE@fN?R+F}NJ#|9JS~arsDqdMFPwQ*3 z$sbd$SYzsJnHB3nrq5OcefHcTExXAWceU0o2oeI|vuFD<-VSXm_hhP;nTyUE!N(Z= zSMTZ$0z(i`U#5YDiXql)r(9B=+_D`fj7tJ8=NAKy`mQ{21osq^yU#=4PKny(Y++TA z$9Jx!E!sCmqf*-P(H4?v@FoEy}h#oqcq^Y8>5$i5!_%vajS2@ zSZotVI_Zu6fgOor7KI#lMn~qzW+#?z&`O}|s-+@(s)H~z6@MW&1G@>wRs0!ry_&M1 znr%XDUR-`S`+U{2F0U^>-E^;e4MK5$%!ktkBxm3(Jku+o8T*{+il!9v^|b39 zQl&389`;ezm=v~vBwqnv%&f&B-1ftlnU(arh4mJsV$*Yx41lo zU@Fx&7jW-73}^$j8!ujsCC~f@?wvoy;jcnnc(P&qWvNQ^Ge!2*O-y%Vv+WerMB*3pGl~Y|fmt@K< z^i3BJ^a;7Cu^Hs|>Q6T}KV4l~XP>&(XYe%VSKUkS3Fnu$z3zwZIk<)DDYzpR)9N3& zt+Bs?T6I9h8xkvb>)#Z$Z_nG$0goe0W~f^rI+>=#NWg3l0!sa7JQhhEn1)#>WSL$t z2#beptl7!D?s38D8mB&iNgnxfnbjmoP4Z=F-@d5h&LA}FIpQyWSsgz#~@&(zbj7iCUeQP?uy7_!zm2yn!Kq2wREazOO^9KAde3MDfaW~L_9 zYxxpiUOtgnBf*Y z=L(W76EM4FV^&j=C$r&S(Xf))yk=%Re{~ybdUK`?PRGG{8oO+oV?~Op=_qr#_I*wi z>_TaSBUd%xhzm4Loz~0cG1jPOf7j%WZHAeSJYJJ|ztMU%u?QbpGt$g;>K-?{0pirj z+OzVqY~hBPd;1fMoL=$Jce- 
zkrew_usD(`w1myh)l2JiO6ZD5@8v$Yc8cF$|BtyUDv|b@8n-l1QXD-#Hr?+5|C@#gl6Cn{N*QQA&~uxv%gX z2`~31%YrzdSM)Meep<#Gi=h;+6WFYwdY_fWiXbzL0L3)AnN1>I@ZK(i!e+--M`Wvp z39Sv-6RtcqQ;(f!SaDtUS176GWRx50ev;o>d5S@!gD{^&AQ3(jLZK-9Mu}KZC-^&U z7vNP`sOWSGhn(r`<_6%u*U?Q#LCko8FD#itk;6E}+2Li64zWpiswvd6oVhDMOL~@$ zSSV>;W(%oA6m!o$nfdo`-m@#5VX(;*uvq0J_XLlClL4TT8O}8YP`D7Lo0al5{K|0W zh%KItvYX_?L6D4Y=n3{P9>v4iP|$#Qn6%`U6(kf5q>u~=U!HVMo^_srHSj*ZBV^GL zA|m!6Wp5b%GnpL7kK$1RSJi$7wT>)f(|vmJZ#Y3byl8~!aS(5>M=yon+v)7=e|~y; z^5qK`SXynt{|HW?NqhPmGQuYqqE&Ct{~)iP2{sy;2&xHwF=?T~XBG1qpM|-^&$tYc zE12HMT<(>xfJKA_Px!8`Yw)RgR#eOjD)(5_r;m7H{OC5mn@!O5fDIEH>OEOk-_I}k zrGto9(JOgwA^v{ABJ_(yu*YU?zXJ#o@a%v{`lH>c*aB0D(Rc8th^K?cgWyVT(G^Uu z01)ZyI7NSHnxNyG7bO;oQl%lOAWH@o+E|bd$1im0fV0Xm`b<3JzumGy#r78q2qq-t z3Ns0zJkG>}(jK^C;l;j*hE*1mk@Vd^cNEHoV-CKR`*Bc`2}=o|17Z1_gEcF%=lm?n zMq^IiR2Y2;ESj-rI1TcF#n|C6+I^rpRrCwW7~T)hXei+h6{K@wuf&5G_jI2Aq|P0> z9!>J8OZRZqA)}q2fmYv87H%M|dqdd(kKP;TwGk(-ouqM-D0&=lTM&r8>>0zZ_J9t~iZH5byeigAfm-=I#GfTn%gq@Q_!M zM7uy53bO9>`SWMHQYw9uOIY*sF3=>|WbBHxx%)-R33x`iLVnk_q2gUZUvjZpS#wPd z+l)O6vg7%CHK)dt^-Vb@E>%g68qa%VQ`!6H!*s8lRi_s3K_;a-NExLmnQ}s;WR;_* z!@!9wfxH(00pX}avY_YIKltRE@vdsP(sXUoR%0t^z%MRra1e!tp$xY1B3cF;v%M`?2EpIF?aLsvah_$c!a9e;vD2RohmS|_{Rn(RJK!t3)1sBH zLWiK~?eEa$EagrPgaaW}YBBy21)F}9WXwoy`QO5L0Km5VJ+SF#U1B;kgM!5JijIlD zhzALRL3w0s&`&kH$Rwq?u5PzX-|9&4C7+B*Vt;57e@7ux!Gqb@2Fv5Px=` zD3MY8tV7)YKSaSpf~E;nr-CpTMGpbdggUOJ)i6c72DYM0>D~0^ROfqxTGi-zr*-vm zACr*An$@$i0}2L}bUML-g=4EFjcfU1LV4L0`fKYeGyF;(Z$gi0hq&=5YXE!Dq5XKz z$XG0-yOaLcEbezmQ-__0`)KfW$EnY0M8C_UX43H((;xx)n6L+`%2w|r4ae<3>0Ouo z-Ph{0ZHcTjj(ZT>ThtZ}NkC~a45OSVy-CKAu>uXi1Yugx%gd1Zzh=oa1h_3sMlbH$ z3k2&;S6B(I3Hf9gH0CLD{_eN;a%TI6Q%OfiD4g|4O*iV0lqFp-?`8}O=1!OQ0m;Bh zW%x5Q2RGuj(-h^4l<1=VeA22*4zZ3>)Q*)oDPBooqk8xt<#Q`@Wqhuhk-y*9E3$G= zK6Yg}2tFmaNGP}-C&8yIH(^>kN5N;&T*a`fjj@%%#mw_n9lkQchCPQYi$m^1&LXps zaP$rI))=oW&T+5~`LEJ(P@B9$4_}y|=B8#T-q9aqWsTeN z(~19>a~Doyr??L}SU{<7fPlAS{DHRkqQM6 zuRfBv0i)tdM^8hT{F^Dn#n5TU&}=CS#X=N^OsuTNNL2#%p=!Gw(_XeaW@q>|nct*_ 
zH)wBj4DCjn;awpUk?JjT32|eQW!GN*ndj(5t+VtRkX2r|78p{x)&%}3vo`RLbB&OPse!eEe@dm0_p$%CMsdYYLIq1Aj_#h@(#)7jwg6^@%!o_6WX!p3k32)>n%qIu)n{7}NN32r5L_%Yl^ zKvW9w(!k?_1+Q6)vjUlP{e?{I+$^%px#dSdYdWb0)7}Cjn(N67d&h;*R5L=uL14uy zkB69MpUP9Iifs=xT`bUaMN?A~ZrXxP|3(5y%DA~oF}5oi1ByBkrT9?JFRQAj^>gCK{t*TMdr4?nV z4uj^KDNkCayZll!f$D&+?CcSYlkp71Tc47_?{7Yth;f1D1Iyghp9A;!o9|U@W*w!zG>3O2}(!;}o*WyI8g$ONI(wI?#mN zdL<-fH(Su^v!)tM(@HrdN;WKOPxrsC__ zG+Om4R*5~xxRl_BoNoHjpcL~gb17|J&8fP?`oK|hT-H(?JgG;)4s*s(V%8d3mUOpB zCBDgsgds3K?;L~I$;r+aIym0p&iu9w4tMHY$s@SdZg*NBon~C?kp1tY!I+5n=6veJ zQWxo{Ce}kAH|1r9v1TjFF_j&vdQ3^OD93MPC109dk4i@z0|OO<1S8+Eu=hIa_!{h$S?GJG^#~mhLPyi%bE|9 zDkApsQq}u9$S7#3jR~8UEb_1^0!EiDJC+*-)Bse12ogG7Rplwc&fFZy1c>hJlwC>i z@bI8C{0L~2UR4nGhw-S>j|R~++SR>ksuVJehS!8v@8faWTuB|cpg0)~9_K7F9fw3@ zHB!j@O}b8|we!w7Rq+sbp?3Br=2)CvvaM>QoVllDNY=CPZVn3J1$3xjnZF!T%neGj z28(LGam|hV{3XrFYq3F{4hj#hxS|!Xp z)h~U`vKrJSeyqim_Oox%>@-b#Gv| zHW@{I5mxd*ReVr?p~z%LHv>ed%t)!PKAax}X%hSs(7!+iBu@|x$J57v4H{Mb*$*-- z;#lVHrQoh7mJ8Y)?ibi4uy+?9UX#(DT_0ehZt1ngkz7g9{sPAjs%1OA+&m`Fw?chn zw++qVyKVTM>+Vm2H&GhjjqnN*xVV@M@^})%PpT|aEIHn8Obrc#fpC8}{8E%^fnXV_ z$~^$(xVsDg(kzO+nMU8I-Cg+C_?qs4A?^fCDE~HRVQ62TL;PvX5Kiu903c@>q@U(C z--E0p0%1IOG;CElggJ$Qj&eyuwC!E{JFTXCM=^=Ww2#sp-P|!Vynvb~d_|=4^&n2~ z^BwVQ)Q={EN8pQ$Km#I-?i0c{j8wSk2nUA37KotR!ty*h&h8#9x0y`1@*F0G6AA@G#fB>M>22`eM`gxjbuW5sb3ezvmO`- zTY?JlgdM!!jWkQ7abrTDCrl_1mNB7B18m#9@6AoZ1L&S`J3mQwuy?xbuLm5 zkxcOo@=R7C8H$^mWYUMVhLrW{{KMh%lV>juC;^c+A4*zR+6D&sz#1O_V^GTA{bek| zS^wcazPZn0IpN5#GRQ)56X9KN;Rr-YaFbsUM@JBWVj28W8a^&%ZlSVAvNA{0tlD@I zfgmOt@-2*J0HdkuszqcZ9DYnlS>0XwSJOP0F#j4o63s08t|jC226|2YT}vjaKNfYz z%c!|!6f3oxse&sOS<=uI&|3wXt3Y=pX^+vLO}t`EwGfU9dk+(`T_S&p=*T506;El9 z-Y2sG9w-I9F;RoE)Z7Y%!hqiWNXrXYihz`tYZyciC4C1jC)-;x^|PBP%Fz307!Xp4 zNLHLE6Q$>#Xydp?q;J%@Bj0WSSVFb@4piBr-*=FTX&WZJ+mQhlU4;CoFNtx+Gil5Y z_{HhqU!Me*bYP*OD43BpVJKKLaZF$6N)&gcwN&~TH`P}C zjIt3EH>Lk6M?*-D>#YJ-Q3aQ+65@Q8(F-j)WchY~E;}5OlIPa)Tq0@Jy55{^TR)AD zs(I!E=3M8I&QQArV%>2py*MS!pG|M1Qt5TdHPb1Ve~w*QPxVLG{s?>iBkbB67tO7Z 
zvXgyrmeyX#=sHV8n+cJaA>prdZ}AARY|VN#dSt>KNgN!RLNeqDo`eN|`}pz0(Q)TE z_;hj3_gvQztdS52jxVs(;uFx;cY;fLc$T7ZDHM(zgPSZ#Yrh1S z1bAtiftAbgdU&Kdf?1?8Y$cE(Z6m3c{P*?2UMZ_EpTynohp>G=Wc3~*JXAy)GXm=x zj?VT$5&in`d6x`X*bk@S?w5L^MPhr|cDraj+)IFUkN|b{XfXuRdaHe#C@HMsfS;i( zQv+CAhFJ=tdS9pxm<>zfqz|1AgXv^8y?+!cL#wfJ9(Ev57R8A&$uwAAShpjiV!Im^ZUx1YMU@6@!y`^+EmZsuM&C*=8k{LAiOW!yZ)KWsE z^&*HlZxtg12yubfqh4NrPxBnJw@5>X%l(s@iI8ZdIL2xZ*OkDw{%k zmfq4#zNQOsH794@jz8jb%G#1T9h`}bnP#9G@87)XXUNiYuU0oPlPX=jm2+Y^Um638 zOp~wCi13yCppt{Hk>FczPtVE9!QpK<7=>Axq*CiXz$44?s63JMq|`XM;uchzLhqc~ zYUkA20%!StU$SIaI11Oz$X0UqUa8(IwZu}84MKq)D=w>!746P`LCRuszbT?#{|w zaj-2LBls^P#e9o`65T`?mli;qwDoeNpV3B4pEF6*Jr-a_V~^p zm_ay}e^*#DLzYZPv1G+zmdDOC z>7mNSbMEOwzQ;uP?m@7t`g88@i@oRHP=FV>o^}8HaX^Qmi~k<*K6lw)VKCXkkHvO` z?sxSuvE*UnO$YqZk|-PWE^mv7X-H=SqU&`7=*`|ZKa2VRng#I5Zwn(9JySK;g{ zK2Rxf-$bsUpT=NG;$^!Q&eSiQe&O^Br(ZZ5Z*;xBm0URaH{={HWee7i{SKziGlQVS zAtZ|l;sJ;t`BywfX-#&gpD7hma-ic&R`)Q1(&U5>X> zWuIK;9sr~ce}vbAs4IgXy+HN?*;=_5$W04mL0FZ7!hv3|gURwjjF>GTMp4zqA%kV$ z^B1`xDB^Rp*+?w;l*obcB)l7jr{GF+O@z+Pz0}vNL)rp$|) zJS%z*U>a5T{R)zO{0ib%ka;WjE6Ac$dO?;y3I35xzJ`-zMkGX>rZbe~L=4_1qu^QR zFN6ze+z2=qN%~Rla(Vq2gnbORab=M9R}w=FHa0h63#O8VXtrz zDz${>qsKNm@*af>BesZP-EG|mQ~=RxvfJqw_1W|Wiga;S+N9P|68~HE3vf@vaCn^r)Dhl<*){}XmINX5!l2SPqoC3j|t}` zLsP-wisgqpMBG}bJ)0;&9AaX71I4vt4^wS^@{2 zjdEci$(n)d>o?n>!!sHjGR`g+>4*PI@?9*7d zKI0ZFAIgo6*!O!R5Wh2C-(4h*m525~ye76QWBFIXqMPStJ?!v#uI4(r>oJ!7d zZ%tn>AywypzCV9=_D0S>Ne0E)FG!-!#rr?bdT-C({`Ks_B#-r2fM&*OJzDAEca?j0usX6uLi9JZ!~+pikJ zzy}EQ0RmSMAkeOIZDn{shce~Xrd%Rv)jHLjZCgLhYR<5LCb8~=0{YXsm4yV{7Ts+P zBJcP^y*3=`)mE@wAr6g;h&#ZclEsHNbZm-*4BUby5!<>i6XjtUn$Eg%F}S&L3~`Cn z&#r36Fm!Rek6`GTzabNyEm+~oCB@bY39BU7!XSk$+EW!SwQYz>sBmxDymWauq2jbM#^7cM>o&;g zxdelcD@ez-ENM0>(yMw+;psB17r&P9m%?=3S!z3uUvRQlz0Of0WfWTJKxG8?#E(PNRJo=-=Eofem2DffI}g?L?0i7V?C?Eu{&(zgheJ%_=WX ztMVYB!gL8xN4xB^khym-^A2W(D`z$YYvEe9W>R7KPGyDD?3ijc-9}mBeaXBp*_^&) zIRD;}tb!vMj#y1EvSsY?D!P!BZqnMIoYn1wcOF|K=dl(Tz{;N4cr%&sY06Ai782ct z4PI4|kI_)XU 
z=RNiuPR9tZwE>>k}v0L6+%i=bO;rcSbJLJ)UEk%?KN$hHVcy~9v zF`Z9iqVz^(yzyq3(mUjB%L$V_-z$bB|L-eJ!>?5)CK&#nq?rO%#2$#ZLYE|IiqU2o2|t)IqM$A7Otta~p$ zf5y68M?SApwnnF116a5}iuFga%kk?oZd_chea6Mv?cJwj*Ajl*?8FQ_|Y?x}UI6*~iC?So!*QA^C3Kf}ywlQiQ zm36z_G+Vz+EnJsurN5?M5SQf_x)p2TTyoL7uF|nB%b)AFY}UK5@=Y{|%6ZB5F{SsC z-IfzvdA?VxQ~a_>+|pOJ%khn zUfSgk7JjpnymuG$?t*1xb~Xwvzvgd^g~YPm2+Pg4W6s&U+eC|ZG4w8mwOtHlTJkQ2 zrCbcnX_%{rG@Fyu)qEI8cL{F-r@BV2h~4?vw>*2)6E)-ie)?9l%8_Ow6m$V z2^jBpkh>G-K>S*O`D8ga{FUsQ-d6f6IdnL;66X?0t5)UaY}@)Nbanid3dFj1 zS@H*_%e}eFtBlK18P@>6?ax*HxvJ${wf1JiQtShlW6f(5f>!!g2 z9V<#m)8AJvo4vnXS${hh$8W2@ooD{Wt)#ZQwN@@Ewq8hBCBYVY;k9UgS-8};!7)$a z-m?4JmN|vA&@s=(i{3HMO>~7TchkOk)`{9m-@J8@pUY5jTIs>J8N#{^a(XVo;63>0 z*p?;DMn!sUC%#W(F#VPEPl)@j^IO@V}3wQ4he^VU+qnbRT4BI~_Rls|b|rCZd3w5T-zl5Xe$)jW!$ z%bUd}5Zw+fdM@wN;#T~HJRNH3ghfWOd2WK_t9xp$scCJHJ)7(9!ffsalY4!amjapK zynY=RhVEp4sW8)A8|Upgm0-68%V}s+QS^FM+I*B-b|S6)f%^T z5>iZ*oVPRV!bV?UO zP?w=@Y@nDP`qT5?A z?{ovXdF2F7mNM;r)?VbrU=gYU-CU9{`XfyehSxWWy;v!zdWXwYOIV`mY8N#^clU7{ znp(X#N{S1r-C$TWP@%gVSS*Tc(f$SI@0BB5w|euY%onpm!L0u z<9uln%7F!&B}7=rFWSmVG#6n3j1R<_-8W4=s7)~_uDo3PqLTVW#V;y;QE8@Hzo^t( zRQNaK1THO|tH6E-)8<)0|OtA{71=k5T55{my4fWpvW%cm}(>Uxa|4@DU_? zuQLfBa1u=02K-WYk3e<#EepL|MtUBA9Snbv{bJOWZIxcMDjzRey=cupMWR&@Jtb^Y zpl9nAp1cU7DFuj4RDE%nK%9U6A~yqtb#4M0i6x&Bt{zXqyJ2_==hoLmfZW_mJkr0Q>lLz^?;-9jM%!nku#+lb;0tNG4yy zNirk+9;fLHWik{x>Eczq zi!&z>0>RIGs7^Gm;rpMHeKF)S>J8o%j?UwO6Za2-wcxdV21DH+nW)%s(2WAjVq<}I zYRl-Asg1Rw4{PP+s@#2Ko!4F;8$)Xq?54Kg6Cgczo$$s8E&D>t2kQxkdn|(eeSb-SOkjo<>|fT?X?w&>70xU4K7t# zVWF125A6-grEWJIEWn5xHMiwtLPK?i?4FLJO?0?Eso%}yrQyYe6lpzkSDfaFwV?<} zBv%D9bB8-F?8}yX=ipyCx8Kw3DU?4zOh?e<D13a<()@*Ig}knO(t_<#-dW z&l8x5Wcu6AYqq2|*JKx6LD`&w%ifV+7e{}jf7fD{>YhJvmZV8H@O!u+>Rk6}ZUnNE zkX-j@j{CCX)QoR;OstQkJZ)7i=CR#4Ffthi8+!5MF;da)1|Kof;DEhtFr>|e;iFJm}Db%i^L+16(I(37lKd`f81f? 
z-elKfgu`k{QDjD4$JPG0#4Xy5Hw|)V#Vuix^vt1m=*!*gGs$q^if~^0kiyOzcQ2+> z6JVFr?se$;p*5ERq)v<&vlsyh75N*mHydRPlSVC2 zZoP9wp|@{cR+w@C6dGtdq0#@=6|^Txk6S3=F`zidw{j#K6$>=4+O?D@w7`S4(61Ci zXf6Rtc@)~<_yL5O74G}nTeBukItP7)YE^3lsrYgpshgJ^m#C(mLpK1KGUwR)LokV~ zzf@R&6Sl4tvjOJ%;G6XtG~Z> z@rZWI^er(=h|HlA7eVn9$=N>PlrCQY`(gZ ztGg_zDN`{UdbZD7npbMX#J6u-k`8J(jFP>5Q(HypFTXUKq%C)? z^w(V7D0Z9dz-;348yEMo*-K#wY-P|ln0hjWDqK2}jcc9}pT7DkZV;PKn>0^}t(Mfb zs4KMLsx<7gD~!)ex|i)SUBc|ISugYI>=tj#z+(Y7W`XD& z7&w4p`W=Hnuwxg76lSZ!8U@Qn6!gti6vi&|zWG!k>$q8ym1o{rT7##HDsVt;Va{sWzLw3Oz@h%-?DpxHy2~rMOq%%(nyC|{ zU)%l>F;>4(bZU2N>=*PI$e4{c+oDQxhYcJj@m1_b8F4%jD=rzMBO12wq4&M066Sa zNSrE9q%vs#lv$cih%{ZWN?Mg}Yu>f249LJxOOI9)>(&x6ar|m^*lH>Sb>r4!6xYV$ zy$t1zMktD%6zzae%W6oi24NFvf`bJdK2=YNEnt_q@!Gk6CVp!h>~4v4&6dP&iL#+{ zMWqWcFWmv)rrY6;)NGw4l&mCo<&Uq^0Lgjx7FyLF)WIJK;drTU6z zV2;>mjIJPFRNxseqvK*_<_Yv{B~M|{pdvo8eUoA#%zc5wyS4;;J{+7^xX`)u)2}*j zoM6&nVrfYg@{514{!djCkX}H?iYDV!ZmuJr20IBYV8B_m*~kzc_o&h!kw?;Wxuvnc zc~&V+Y^_h@|6+zC-gMYggZLz&9{hTE>B9$MI7w9Tj5N1cp2vT2EVR7jRPViN;^qC) zDG5bKdO&dHQ%)j9YND&GXFT(KdHDVtyt}5Ux+DFrj~Jr)W;}Cwm1y1jD0;B@jl5N2 z(CwvmKScknNir4xWgSO1*U2X^*LJLihdKG#{P~{#%lOF4{95?uWjEJ|RxTrAi3W{~ zv^);%QA|(I$dI|raUKQ(|IekLn(oMPx_|$g;@P4xue2Pp*&v*r z*s>LnXvPq%82_%c=TSKMj}>r?KQm>!Chf$qINCR>R#1Xat~MsslV^_BKc?uBF-H4C zr+O7pm5hM?8=HLD_HSH5C~|IddA%}3*`TNB2BuF~1zmNCc0HTLM@@_Lf=%2R=+oEL zCg2EW-Cv|x0(MOF_jzQxM$D~a72^{+N6e{W741i9nZw2MKdCg~N^B>UDsr-# z1wpK;52P08SzKbaRZ_8LHt#JUEvsOyGLUNR-Z0*V8by~S-}+ zA{$^I6anXQLRA>z{AQ5@WP_6q+hl{nc27sod0_e~s@}~T`h~{zb@s_c$uU@&gw7j; zb|-o+*SUX0n{m|vB$?-Kh@CPahGmbq2dikF$eXnfJQy#-9jI2pY+D?c71}en50a{o zx8yM2BR`)ViqkorCD3_jdfGayPKCobL3)#eW!pc02?5S=BK$}&WeQABe3yu#2jec_ z+5UzX!Fb3QfI|1|y!s2K9{W0!8ErQYOaunfVWdOQ%{6TO3P~j4&Ih^pgE-1ib&cNu zT{CAs2c-$81!Kj3RU2qD27KE6d@8&VmuyK* zqi{@;CH;V&p~A0BH-XW6IyUX!`~fYbY;!JzzuBGL!*)w-*WnVkq5O7@$&fg%b!uo1 z>FvR5(y;$+g`K-T>R8`wb(DN^kT`^LTXrE-0{H;q+c{Mn3L1)-H`r^dX=EoRY`Oij z$D24Fk2c20yOchc*IUk19QbLQ7J7GC>tlEP!6j^_;-=>@WCecE+=Of32@~cxYaETZ 
zF`dW`hHP?&`)#h1e1)K;jkkXrfdwA{>!wW(fCJuCsZN!whLnaEKVs zmHoaEOHf{$?DVGbx_I_s4?`;|Ln|guVDbAwyTj_iG|!b6dh0zHn;VV%i7;%n<^Wxi zLdCLUrrByNVse4ac0*6tiU{?;s9yMA|7#l*l+ zbp+O@CiR};wT)Q(fMvbL_Q=7s<>H?m6U5CrTBf?Vtw1Gq?Tv*Juc2nea5u;Ls-gw? zqXCtqo8XXvU15@-f zhkh%^SSeSCnGW~+ZAzpRD`XvhmCYxQ>^5NsvZvl?^CpO3YX0QYrKB;=lG^2W!w*t5 zUm6by{JUN>ct!uYA;G=9J`rbZcuSq|2Na7@!A^_-ar{27^R13w+2Jm()4`s9$-V*~ z)oIML@$UA|Ija?X425ld1b;Pl`4;^oz5nu-m|;`iEvLul>3no$Y_dK26Jb_E!@0#O%h|G~}8vg(T;Lr!rjeDxKY z7*>`_stgzsVZpDgB9=lc7^6rGys!_DHgmcq)wRpC48p z|0tEhhn0@fj{i`arZ6v9-mrOiiIIEfJzxn(AlYZE$bnT5!fSw1gYdemoQK%TZrELE zR+iY*@l!=eZO7e^;?aunKQjgB--|aZ$lc|Wq|+<#q;K|JB}HI1&yK1{%tLq|$*%_% zJ2gC{%-<6MmgMviB7h7O1|D-p2~sKFizPbhlAr&09Hl^bU^S`{N$?N3zc(rk1st_B z!l>v@LP&yAJefiE*u|c{q5NEdr0-{Ig2%Q- zH0tj6yD1FTKFR~i);jT_6N5m&_NDq8@J1sNv)3ILkuUoF2oKRTZIuh(W)7wRa74F+ zS|n06w3laoYdTXV%aKrgU&pQGa9YM~0w=chlU;Bs zmulJmRCP4tZ(G~ZZ9;9Bz}}fTJhqli;naEv^mo8}wDc*r!gI1UR5{@|oom??SNsvH z(s<11aknh@o4N*=AwO?h&l0@3xAYxRdsu?#J(~v7apPo((z&+5uczR&Y~iiT8JssRTujw7`^}_N zjma`mK1|xd6$dU=h4&G%*GS@_EnqvUjOJe#)ja#8K;%*+SW{|X=q;BY}!*+W=hE^YJFFI~kuj;$Se z@}3r0?ET$TNErG?o9y%NA}P8h^>wwBXgUt8q`y-xS=5cM@a|{KdWmd1`HG)QD=%fs z;}f4tX^&*fqcx3v$Bk_VB;FO5EmpC6U$s5auVvdXzLMxzP?NcWHGB;fnA*FWaGE>< z`rhR{2*#CP9$^b8}j{&8JOy-1eW(1)<;aci>{{U;{aS z5A09DMVl`4>F^@6o`~2`RhVh&>P|RnmKDgs;NoTT1n3X>ngOkGj1G?5MF5wG>b;*E zXNaee{(?x8h0yDp1RI2G%{qFmMg16W+|8EH%B^3`eS7kz2lY5TDels&djUT_lcIwz zVkn(ci5rLZ)=u;!uGdt>Jd9Ny0$~R=6x-HqL_65x%O+-hG)@g#nD#s16FhZb_Pocw zKR?3Rl3=eRoLllB^*y~n?F-jFA5NZ*)nI?_is$q72Sq&Ne$^|h9Ar9*6}$p*cmLi@|@87y6Uhi zS%LvY>I)FlRb6 zpB^6xOHsPBZ#63SFo5d>A?OxX+a??RisQK?^napRGVbW*3z^}y&#aazXW8k1PdSO4 z>!s>9fJ1R)fx``&cka{hp7vrUxo#n{KGrKgnJxv(T+22FZ9+=m+=d+jBr1#e(XqFm zY03blJM(E$L zI>@QAvf;)UP5x~OZb|M|w4}95x%{o1;3N?z3yDHaNFpKu=sVG_?fj`?jY*rH(h03X zg-Gi@OU)uOiCb>hxO zy~1vyo<>mfH2Fx*anmD4lCVl+O`^DQ)2UzFp=p)4Ll=G-1=In3rcE-|&H^iZ+ zT(Eu}wrnDLg3@S9N9$X`ao(riWd1RxdDSwPXWaD*GOyf~CRK7fz9NDoRT9Lif*>^& 
z5<`79%*4p?Sfg$ChIybE%POBnHS&q{D0L6;;zU(R8?9-tHB#aWl9~Ko@8h|^4-1P!anG)W?Hxs^85dC-o+WRQ;HIi({!itev|cW@uQ>Z zN@z*37h&I`?B78v?694RRAw=Cl!|=%Il)UktMMHjv6`!Q?b=ETTC)qHNp-_ZnG6Fw zHWVP_C^MxciK2@oCA~pKWQQ#`_^_cQ6E-*f>W5AMd?C;E%+K8&^pq(%YE%VU^_aA# zdKPJqvc%gg&$rfVroC=Yq>C%+gIWf6mpfa^wMg!TcHb|Kyo1>xUqrXkj*A+`3@L_Q z2QGns9dCJRceS9UBwuFYNzopuEy6*fMGc^HkR6~-rnXrG<}gQo2tz3QkTD=d&v;&Qz6qfN#o6Iz76|nbz)G^eI}(-{w0?djCj_)K3KR z8OK1lp?p#rcmw#bu>Z_=@-i;Xxm3yu>v;!K%N-2;2(Y~U0dpi9RnS?us1cw&lc4H( z9VE7Y$NAbgctQT+fBmR|mRB8C_fXg7%}-s5#BHleSqea-f$k6d^g}S1kYo}V^5YOj z$Ad;l37gS9Ov!tgk+&hKJ5(el98SEk`IQr(Im3o5%KkhqRF}X7@`3lrGMm{gs;>!> z(b*^M*9xu|JQEmJxB=1Ac}URH$(l3E!0_K)baY-+{>0tO2KbUD*6fj{BhMj!bNM4~ zrLHMd}m9( zLPV9FntLWdw)kMMD;z2w;{d@-3=wT*q)3C(iGa#R5zizEMwc!L%xjG8h>bl{Gpyk; z&kme)bu>ZJMICwJVDz6+GC1# zR?dmBFnGOzG0_6-@g~!?)m0VBa45$;(X>SYt6n-<}U z)Kwb4(_8E<%L*pZQzj`lytSBpC+qPgX0Zo1)N&8f-p;aUx>MD)+Q7Za-MxYsI#B3~ zPC^jpXj{Kf%5VNQv(@W62D;@psq-0?;q^|8?P?PAP29hAz74d~z-N-9UPSc-Be!c5 zXG_wU7K^mzxG7~5f z&TE{_xs2>s6;r9R>30=S;zI?+jk-63x6-`_5iBdiZ=eDgBvK#rEBE=T#@XZ z!c#JC66?RZ{ivm)gW9Z@<-%?)VbU!tnI_&%6t7Ma+-&1*cNE@b#KVhE z;PC*2@@O}X3ZaNN!u*kgG~wJak*>iMHXsLu0= zmwME+%hq;xOb#xmpNbv3M|2Ml_V?wk&s2hb=&XJvTfVR*wlv;;%kum5s?HY0Kj+9{ z-Ta6XRW(ftN-`MAX-&O5-!5%pS?w(JBqb)R)$*qW8-n z%9@=`x4fd3dyd~8G|BRYP;zOOp|}<2b^WPP0y!?QAgO)0-XZZ$ausG0ldbDwQ*4#aDGXHp}^49qp+`%K`y3u9AP7>5LK!|rZ z&fVQN;mOuBbjs7L_LTDxhB)y`8{k`_685aN`LLjN&{(5_I4`cAe=uAv(lGCNj)>;~ z1opmIb7<3LZN5DhnhSd}45@fhP1AX#T&o#jVE-JMQ8~C3XujzV-)65HK8uGR$KFhA zMs_e>yNYNE>*Gg?qJ%Aqe>QyZiE`9VDTz;dHlX#^%mBDFRZD6<@d6v64KIhQK%sif zCJwqVSNc8cf}zqJM#SJV%*VHu=f0x9N!}xUb%HQMEa= zm4B}H-V8{_xB|SuXRLKROarxV{-x+tAM2@Hf2c%2(3FEmw(3o>1#6zVG?x^(t^t*3 zw5I_Q$1VJu>_ozyJ7{?O5kc*>$@v*GD36U1-TW1=+P0=Q6|?f+Q&3m*WX*U2Z zaS3w(#p?Vyh!-OZo6q&}4hNgj)}MDcx~fwcVhBMA{4o`O1%8uQ68X zj>6VnuI@6+c4#gs0oY~O`G2_T~y$qn^K*+Wk508wPNUZCjFFO40MG@6W0m59T3aI7ly%2z5+ngU* z2HuNi!6ui=nBw)5D8wpUS2vuR;(uF-Okox&Q|Xb%v623t^THnC_q-sANAuaTO>GAF zneN^LFN(#;pQvQL3+-m{dnT?l8Q=O>sm5ly8R}WKgT`eNODd}d@1#yDretuM`r5|# 
zF#dal>m*5+roN7llfr#_tJt^6SXnCouj1O8J540D?&=+;bQ;66`_gk(qtD>;5S_Hn`wWN-%dB9%L>;p`CIN| zQAg+~$5h?a)Y0jYb)FsG$udWs6`plAuhNd%hECK%u0)uxQib>*kz0&7cBWyT@VHhx zL|juj(^w>4vV^y5$+X7^O(-t^wzIG*ry|BngLQO;J!oymj&>~|-8q%5VW_Z;`E|e) zG}9CFFZ|pfYyqrvh2atYnnpb9_=P?Sacy)sG!Ss92AC-XjG{1zz&?c1J^Zm)DJj|K z+=2=v&8{oHrLmXN@p{T`aeO3vA?Hs0t2au{6%w49ic`!(jlHeKi6?4!I*){ztRti1 zc3mcbPg&?wHX0ArT2j*Ax>c=Y*@jQ6+hSRqxztQ2yio@`MKjzvgNe5dn^j)`IZvvHwH-Gms^WMO=8ttl=M3Jy7mGDbD)RpPjft-ntPKcok9 zc0XnM{FUitlScJl1h3k~#s9)RR0`kWp4vbKIdDOx61<=RjO`dQxRWjK%_p15C3CVN zNFE=Nc$-C$R*kuX5_~=WTRjI(_d&xWs#?1fFrS~-`F0!)S5ORD!ZM}@{EekuYlvUk z`Y&+ptVBZzOR-paTnj~^XOuH-rLz6_nu0Ef0bAO}xKGbR+xNDutVwph7hkUjeh9*t z|9Eykuj+gH{g{m*BCmc3&Mu7%C4@%*#(NNzEU?<`=%gdoBhQ8pL)nDS6ZJg!KvJZH zUKB_{qC@R5J42hh48EUV#q*Va_4~pPM2}aMZxkr?34Wq!tkUWcct>xS-b$-jAX7E_ zf`5&VyhHi9_NK}`Z0v9K!Ke6EyeJ5rOPi}3^hw|qG*l`I zh(zQtB!`3ZNk_a9V7J=#FpEEf27wzQ*n1>A8@JP8xa%6VZN<|$oY^{!Al5|+EC$W5 z6`re0a_1N6B#x_Vwe0l|8an>Y_b|1F9qLWQtKCt>GaiV$vBSysQC(()HD-F2_o$EI z^%lpzVLd7A0P3`1q_-csg*KCFv5yB$=CdPpfgcJ-slWi2mttTx+REgWkQ!so|Lyy! zr;u5|qW>i{`hO*STFfaiIaVaT6Fx-V|0R5){2Yzd6sk#?kRhKJPC0mZINM1AtX#I* z#6v_E5t(sEPLbD<5w8o7Ii4sI1;1N=-2lIE@d$dnz92M3nETvs_ih&wF-K-(a=N&g zDm^(qXG6RCldB&zvSSH1YQ{F`sE=1I!XIng_*Ch>43IXr>=0L{EuGXoC4FJ`Fr`6% zm3!jMtY`MR=c|3yU;1&)2H+XL7W=5JjnO!>hZ8|4a6~U`h-T!*0rHI|`#q6j4IT^* zUCtKtFvm!EBnI}}knpa*X6Jpk^#kkfAsYgCP?#h9nZ=oqccDieH3NLIVxad7V|&nj zf0lp$%p=yro*?mt;MKDeBeau?e8;c={MMc{mRH7P4)HE+itq=#m5hX&)nX_bgYKVZ z@W#2Bu5)$R zB4q0ShIX2ded`v-EyX-{WXw+02+$=iDkhg!??c?Mg|x6zrnf@_&B;9uDb5E!xSc>? 
z<(|5bMWC^AgN&v{jGCaOqu}ZFVEOq9@|v;|4f+ieEA>an4k2WbODfGS3=>2YzP?EE z>t3KHPLJ^Z5yc^)bM!l5lh@0pG>B8uvSUre%VUut8g&)%1?z$hai}L$Y8d6>Ab6;U zA2)rZ{KbCt;}N>u;%PigJ?X*sE9J?jRMU;sMRO7*aVK3g=SVk!e~}k3$RI->#@4UN zNQ(E6Hqf+9;1>`Ek-xBOb94CsgC>g*d3hfi)6J8U0LpUB$mSdnMqdj~4u_|adig{> zqGZln>)nlHpO zS;|5n*J!41NcuW`oM?CM25|naTM{yUx;l{O{k0am&l?d@T;-Yv5AIsjz*RrDx1V&g z!-@@yqKF<_W}#=N8g?F{QLIHDqR=J_MHuD|dd4E2gjmK{!y}e}acSj61{_sS4NLyD z!f`5|lWLD&BlkiTVZFhf_qI^@_#CqLjBhIu>w3I4R?}kvGo})7VL=|-#n0NF+3)al z@p3AeH+wB~5j{ExMp}|e-Y_OjIyr^*;QqKpZ?!27el(I~*w7_pEA_J6Mnlaa=(HJX z&VWt9K6`O+(v$NCTVbFlL4Hxv#+JwiNl9`$2RHY_0Ee5JuX{*J_SHXu>Wa%5!~ahi z6e%^|TRTDIc{k(HmGiENz#-?()CZOG4$x1R0(BR`u{voqzeb)RaYTtzT7(o_J=b89 z37_wM0w$TGX_2koQ#$T5CSGIdxb{d7fIC+UP}^HTO+=ASERL#XjAT4}9wQ~c=Y||Z z{U^Eo0CU~X@5PQiUj?2Z&Duvev>pkDE~{`di5J8>j3^z^n3*$8&liGGWoH_{M?eu8-BnO@Sqq=3J<&uieicvZgc5t zd@x#6&1n!Os3J4S9Fd3cK|#GDmPIur>7S!a4-v36TK<#IeI~lIH+EZ-7iYwLn|ku6 zpdkqkDT-TlW{)2*)5&OcM<~VL_6uPE8J$!p!;;GWj8VL~2GciEQ|k>)cAOqsV$ZFcUfr z^l?Sv%WIBR5TP3hgI!>ACNBJgDjY2Y32uP1wd2Vx(x!>eR{R!41FGeo51~ z2?Yx3`;cWYu%1So4}My?@eoIil`DA=;v;fL)vBdPeb7aDRmTJp;)1mkp0PB9j!kF& z91v&4=Z5W!5asM*gq&IBE}>?J)YyRqvrPM7As=$s&p}23p#;sg@{+`1i@-|1ujy}` zR#?S{2{-?2@Ve|9`fegD&t%A%M6cAA6cg1J*aH=AM&oXG10pQEO#27ZYriMnt;-SWbA``a^Mf$EXKtzua% zBEBJKnWEgj{0d_AQ^&x!|HTXS`SJwFfIzy2VWM!z0rb6ackXve^G&%oMnR9C&VAd7d%lvIqAOF4?88^l$v`tX zu}cvOu=qzu2X6IJpzMG@d#X)a(r{)3&BfAe=Ip5lU*WkL-~0WEtf6qL1CWl41QRd! 
za=no~@44dP3u$#w@Py&?r!~8vF_K8h`mwoceS5tZ1cfL_io_^mJKP)0#fymk{(zaW z4Sr!~f_K7(axiw+<^~~GN>3$zjv+>=Pc?eD9!Q$~2y%HR9ECE^6)4&@59LXe9XG^> z`8mWPBacW9=bPw}SEsPo`AcCevteQd^Mru6yJr!8*cwJID)Y!9FD=MMo4q_+7~l}<(0qK>7pVD1n0Gv z?W(RzOwtT1k81ZpA#lPP8V*@JB?95 zo5aoC-z={I7k_?omKV0zbx1i`7me_*B#&8&w6lj95yoQaJDI_oY;hbPvkt`+#=osO zEN65)48A!Jt@{fH;>7RNXS>#(a!SK&+rES}eG^au5zA(6R6aR|nqVy$3uc$s)@*Kf zN3N>%K)koS%LCwlckqahwZaCu~a zE@Jn@%BF;WSNzDJ{+Od=_=0RrjYt7#n?o*zPlLyBr} zeuEhq76i{^AVmk#Uz1sP1~NRttom3@y2`k>mIQ+0e#$wr&=J`JPRPxEBDQdZU|a|) z)vomUDp>rHsig<-l<;b&>Qd4;B1`R`LDEyC8sY6Gh4Bf(^~p|CC8$l8{nMS1?+#@C z-Z_U1;eNniVCF!55#bV{S z$w6PQXCxpaP3(X5F? z!T7^7=Jz5F2Qm8^V315?utY{l++(Y)q$wMyo%I>(={xQ~@P^3GJSef{XPegb3#5FF zg&#k$%g`STi-R^*k1aoTNKmPQ83o*}NT;;hDBw53s?sLMU#Y(D(G;>v&cy|I zfkk-w(rzjv%|^19V=}8|!2H9T2%P)8mtV2nbS1hzee`^sjIff^k5&X1d~7>Xq(Ogu zs!H%Zy;(pO4 zez^c80-%$ke=)0=$5U5jk?X!DoJ~N6xEXbm*aisKlENfvv>Oe(ir3|lUHU=?eQSPKRc$}N z37Tud#W4Sp}QyDRel=fBtixNEVo03CNej9wH+|?^GMBIS@4g zYox;hM6d)cH4&H(`K@_flH@|CMkDzWzr8rsVT}C7rca2{qT7no0!mIV@$je|Uxk$m zyoK0}Lcx74ngtOcUR)+O71C|W(?h`gy7w{_iIGsuC*q5dGG*t}F-%`R?3(Xcf~orq z5-|0q^Rby03l0renN3J|viqEhCN0?Bk*o5X;bC~ASL5M$s*EKIj?mT;qn1)wsc&U5 z6cSO=obD$0JUjkf+xhYH=R^O4HuDKxbsz?|3sEUy(`T-fJB>w#+RRDrP`H|(oP!V3 z?ie3d85PsyEe}>}naxf2Rt}VL`u?`=0qQhet1MIh)#Ba<#X-|%A|9xxC>J>?FoD2% z9(e}pIMJ60|BEyi?)w*Nt|m2wl!t$#@$-cwnEBV#=wt!(zBFqNIqDD7KXqoUij-kW zgvV0Pi}e=MYN*t)vk~-J$pigB03u=nE*P|CYjJQ#?0r6j|L($NB9Q;j%z{uKae$P#q zY8FXNCa~yA8A$O#$v>47O_ebhj0-^Zo>ig<*iLL-Fc4*L80>-R@>!UpuCg?>`Oi2w zBMZf73MmKb5v;@%R@EckIBP;=9q-QwExN>7ZXDJS^Z7tB+nL6eRd%d@u%?XcUMBWc zN06?zp*FIvV(%_!;E&q~M7y5#skjuBt%~(R4?{w_r0YwK&`|F41*RlKscSOH4MfLm zPJA?5buN36a;?3tNyQ#^*Db-fhiH(%UgtFHLU`B8g{?}j7LVT(dv^mCHIxO@0ekSi z@V{p)Rw|X&+z2RB>wCu(8Qu`)37UJeJF#K9I|2mnPZItlq8yXx&<O0viH#5 zt4O5JM%x!QixM^ZCa-TTQgK_?8;7=l$nAIPX&`W{Y+Z-nYONx-(VkOVL z?csnP1LY4^u>!lOm0 zq7==$`Tl5lpJ6qt-BW;IaR#bJNfgW#YVZJ?c!A0V6o?jdkQT|qm7FC@WSNzB=5uk& z&xE0m#K?S#z5aYYssceSq>L`&v9XXz(T1wKFK@`fu-*3Q8zaRPDr)ja@Tu=38Z-_F7tM 
zWn7QCPZSj$1q03v`r1@i+7}|6K9QH`c1JaP3dvzBfg3z8J4sE^iQ2up` zyzPG&^f|=VVRC?qW)8#$03!x!V~Be3~P^WPE2wAS*Nk z)E2Ezf7&LQz-mPQ9f$TidQ&=KjbqAZxX`xlnP^RlAfbP&=$Z63hjcs9h=g1GY(rqT zq3(;6ehQyA^HiZ=U26SWSwUhBc%-)fK>n(&5Hby^ZHCoNHqPq)jB>3*-1a?l)QKUY zHYEsirlb8LD60_XC%(3;*9%P)kS2EJqT1F|%aVmX;b@6%-i9n5q_3!4t5fc#96wVU z!7I+Hvp=Hjh5vo`BtsSHEWvb17STG~QPPMdy@#jNJj`#*riLuZmwH~3FhO@+9*{%L zLCA)uY<0Kw4xBuyNLVN%9hbO3J9g2KH(7kIa_dPZo^RkHHVUuo{_#IuK(RspbpaI{ z|JMc7V*FnhP{qXC?ArQUPg1kFJHxLWGjtusbp%iJ7)B@^vvItVTHo_42h8yi4@80{~hU1!!L2MFxl9yk^&M@Hq6j&uY8QnSp!0CTpzF$`t z(Hi}vSZJ7b{3y=!+yCsBJw2bEYD_URuY5`1`b|1EN0}|n5!hpd#T}dYXFzXi#@c$* zkkmmHRQ8|@#d}eVVHMi;DNZzYdAvJIWxZty((^iVWvTcG+-HHFuN=jN$^!K3Bc&b{ z8V7rZD3QjWo>IUUhfYtwvM$>h1ust{Fm=(HoG3NYVB`DHiVR1%7zMnQU%Z_qw_6@9 zzvfokqUf5QgKl#e??ycPDHRusD5S6|AiMxGuf=jl!80Lk-M^%N(7AX~)kG$_en&1; z?DlLI&4*Zfw~dE7D&XErYYi$5sII6>x*GQ@4J7upl~ay@#I%o4*J$!H3=U#U zGln+9ngp(B=B|pwr_AQzhd_r{9WaxG2gqo(H4|3Ci3^+OP9Vf{J+q9op6_FUyc9C(S3ELj?pc%% zRAZI>(Y^fc$f}I7PERR3(n%aIXv(4<^OUz7fR`q;VN^vMg5Jb}SnmFYePxG_FZiKf z+F_SjB0DhN13f7aPDc=q%+l2VnvIDKz3~!?{f>}*HR|oOV~pc{*4KUb!tFx2@VQ;) zzJR283yUNZh*%G}8_SXJTiWj(Qwaa)<0n)=WT^~k5uGhsQHucEXk`SbuvU6flnrDi zOb)=`4WqfNHU5Ut>SVk_mE_ZG&(eCjzZ`b;3*o1>of_+gd5K1cmg!PtoZ zan@Hu{DLlS=s&7j_aWS`L&s$V>(ccb)beBVW|A*06pObVQFcHMKe%s5%9-66uPFGeYJnS zytrS5D%^9@V%XZ5zk+=261n1@PzAn>Bh-KoQBFCn!&#_&d&@|rSe7z%LSMNYZpW>a z`kX*rpN6)nq`}H2?nS|TiG2O%15t(;JQ5Yl>g5O(vO+A+m&ylSkwnOMrg_L)s^9nU zT7TN(aEs{`zqBh3k=q>tqnqH6DVXM=dw5`N{I(hx%wPH~go=c=rvIuRl2|kM$zXAjvw) zvY!4dp>$fm#Eu{wiu-4yqu1S?s$0BjfOmJF%&t&ejr#SPtA9YEy)BZC=nLmN5fY4Y z)GposM->@;W0MYXAKSOR9~C61HYHu6%v{R3)1+`I_p@3a@+yWFu#V)n`gn1BlD zzm9P-@j+u0fEe%K=sU&Tii;gv2`vWNWk{{=#(<*7)yv#KLbQXWy#4=HMdpGi8M-Qk z?lpWQlM=mp@e5wA@HOrx?sdmx1!%lS@AqGX`CVBPLW5(BHE}+F9Y*0 zzN?s!N%=MmD8i9R&f~{Ws6v$*`RDxI2qYj4Gmke>=+zJQem#)Xd~SMJwICwP*%R)b zgm3JgdHMhymQT3OIulyS*Q4=koAG1&K>GA{nw#tOQI(EpO=6a0@PDI-^ye!uNSh>P zilt>z{k=cq5;z+mGSB-+bFgYo{TmIl!|5t)Y!Vjp&Uh(&a0r#*{qeDaDkf<0qRJfM 
zF?LBuNRg*;_jLGjh`F&V|9VSj;xUexBxS!--tYrLfQ?Z8b#CQTP3t+B%C!cZ9hG?H z)#g~X{9Og|gEJk(HO;YD2*EqrBb~cs-kg~jYxE86Am;XEO;@IDu)MfBe#XcZFkxI=JWi|iI_7J^S&_=^CkDr z{b6TB-kJMeYhBm+Ey(8mh@h>;fpSU;@+=BjY&|0kqovzqX}J?Gm?X;Y^qzCnozUP{ zd$tlCNV0587IK*Q?;@e$%W8ZG1F4_5OuknJ`->a73|(BAjRy%zltpDoJvgTv$KGuu!#9Y%t|ye@OTpbL%|L zYm>Bj;fWh!QNYi75C~fS|0szpS`_ffQ~tsCrg|K|$lkg>Y0HzAL&H_A;HC)RNRuI! z{yvfpg;)1S@sB=Rq+|69bM`~ohKY|@!F8HUc@@&k3Kv?e`TPt|2#7ecLY%geseQFg z-`ofZ1^R`g#G&Lrq^mV+`}5PL*XEn#q@|X>MyM@y!auSK!X~v9DzTLVd5_NYwxX>JPjeen-{P`Qu)y$&)Th$Xp&2HQu5 z#l9XkEMLAq&98GLu!$9`^X>-pw&ndSfAHH5Pgqb^4265;xFdP|UGAZ(RXyWX$6H>XbYE$!YttWkyCo`f)0y8ei#)PMK4fHNPqX)} zkeH(S(q;C&o-&dFRJglCCJhcwj|q&Yo(uhQS8q63{OB+Df7xZrL?lb$+Ti2%J;}>` zl-KlpCnZF6+A0}>?-9hROFF9FDU}G;oi-$aOqtU2k1!ah<0O!28iWOL2Dz8dPM8CV ze{$JnCp3~=!-Vad%*8u66iYkuN{6AX^ve(*gdXL}&;3ZpkRb5T^mDi?p@Nd^^f=;I z;>gh0OYs4BhRn%c2di6{qM6won&^(90;B%NeGv^!sp}DO#8ou&w9lmwKUe>q&`Oqx z7e`qBGJZy-u&9+nfDdqA*xV>etjIQXh;6+^nglU>isdMmjO}|e$%1yICGAU4P2tbJ z6-o-XpM9e9eGVdA_c}&NWdoNt9uTv+0QU)^aNT&2;7XVdSeCk??dR~@JP~LrCZ#~x zT=jheOZJ9%vQp;t!yYqZQVY;WvVjeVFZaLM_MzWK*Zf!4#$Oa&dnH zBzS|PFPOYh@o-j&&4!)uBM367k!W*;9B#uuE~M|Ob`k6HkZ525&A(Q6H-2~TU%Rsm zbu9UUJ(_r0$UJWHnHLwQJFo9YQPT)5^r;iz_2G*`jBVDC_l?HeiUBGzKTn7hIy)p) z2dqxENx1PExq%x!ks`+L5`F3ENc$2a^_<;P9wNlGn_xPz0Ya7!90tw4CdAVdwvgHX;patQYhCj(_v{aEfrU$$fm(HW*D3|Eh7X zn!$Z9^7=HW=^BRBMbx#|ZmT!>h@Q#0Z4G7NDXbDSn#IwidU6Ck-c~3H%r&RG$;RcC zOv~V%2=r^y1hs{+9P@1~O)00D&qpHXIR+d0DmtexpN7t_noWvw?z7Fg;r^DNe{W4W znd>8_)Nh^h4#roC)h;OE_!H0nZ7Q3Fx8>lzI#s(!tdomSbUthmL)C#$Vm@4nF7RrV z+I*0ddV*=U6kKI#bbVB%<;4#C4;uWP1EKPKxM~fdvJwRqp<1J=syHBxgH(5w@u&_l zaJ3!2sI`{&6i?Q4mC&SX_-5dmDT*&y^K})<1k)yzYYYVyVhjC%@+c&J8iaFB;IZds zHq3sF!8Q*22)(VE;rt@Ma~Rl2DocI$E1@=Rh8?)5BIz?(F7he~Bs1cVExT2mKp zlmrYz%M~W+)iVw@_S8}l%u`BoF|HqLxzQ2AWzb;KgcpZX`qe5J$~QVrjDUsC zpjWJvCu2pL5SsChf!s0S$r3Bz#u}5k$thNFrJ-XI{icr4Y=S>cv5Cj4O6b1nxyK7I z>NN%#7+>4l&p`5KH`-uxs&YRlM~62ErqAYMp5|j%guU2IsM(77ESKkF6p)S4NuwoL z)g9Dq=3*!s&%{`}mT3IV9>aj~%`PnY=bVD`Fn1~zlvoAuD=vVNTPaEaY9uHr 
z^P0m4K`a`WIfh647}MA;B0kfEtMnFbSKiE!4E-=5`8eR4XY@ zu~lJZ94oyAj1IT9sn5(l|EH_)ol14vtvNNKpdEIP$-EQJDJq)GCZCKO>Ks8@9X&Kx zH53D4&U9ci3aWbeJkRm3@cEVCd72{{+?ptNq^E?IGA~^hvg>l@oVhfJ#TlkIjy)?Q zNWQEK*cQmiwAC`#1LNwBF?}x;4v8NPPdmjJ{bN^Zn0sl5I2^INoL6L7}L(&1(H?2unps{zU<^U(g?6R z=?qIGWG*9O(JZ5`5ybw&ib6z=xxdwe2h(h_FD*N!+J;;b(LRhb&mhzjXD9KvMLbw# z&(%sgX}|aqES@vx+614o74S0+`xea9{_6B$*o5-sKt$6vcL2(V9}L})W26rYQ(vgC zE)>nYcP2Wcgg4l;!*fN%d~#I!8Qv~J1lu*TsXM7GRE*&g5`^1Yaz8h3CM;Qd5eO`0 zy=#ruFH^gildZ4{sdcgDl*%F125N1sRemCp-8G#+JE~?oCfq8S%m5{Su|a{xGQ(r6 zk`Te`KXm?xGTsT4z(NG=lXAz29(4drY2O#Az)Uj{@>62|g<2`=rf7%l4JWp%djb^o z*`}1GOxQ8XE`|dykwhxI)JQ3yfTEIVi~3nG zlE^{TZalLt|Ng)k3`@+lE76n?smPTY%&^5w)?srm!MRQxPifyblXs;v+gN61@M6{s z=~8iE7{~ldH0Oq^cR>5hP*!|GNUuy|(q3F%21NAZOU7a+s-V%0qX zeG2AC@_&BXZLW7WahAb^9z-#s^WnioB&jaSuu9r}Y0TX1CmV2yZW2uYuu`||y;H0- zo{@E`=ldJjVks2oKWs8ilyf5eY_?~WC!?A)d=De!spZ~|kU!$r@9 zFA?6*+LtU(DY>O5ft$GxRSRo4@!TX-e-566X1&v<1)3Ju7xGmO?f%5N_KMlhM@y$># z9R6-J5Mg4A*O)j)ml}Mwz9-&${7mJ1X2YQuVHsl_%sMI-d96UYAKiT{{pZn6Afpum zc(fPd>B7+Ya?phYcilQTorNh!NklU|qJ%AjCh-$-|DJ?}HZfZDj4WluiK)2?a-nLv z9{^2E2NElrY>nYJ_wEF>ubEc0=h1;}VBYmY!ztPbWKhQl>sm8zezQ70n~d#a^BnpU zgfvRAyfK0O4O6QQWUB7sl=d67I>KTV`rGJy4#i=hNZI0&vA@b~HC3A8!=~=Sz$Nvf zPc)iB|F1r+(aZ|7?c6pz>toMI@g9q%fwyJPdVs-VBGdM@f8vUAqC}A1P;BDpC?p;b z*pF9G_(O4h#q1RZNY~>ZVhtFora@^C-4Ye*`Vq+K;`;ffx9_I;7>IrE->3G1G7c2X zRa2TK->mg_akeW(!hBIg(Rb0MuZie%fz4}BP!-iZu`yX?%+C+A+iWB@YJ^e#Zy+qO z14dI(5=2@gKQrgT99JmCJcCUpe%r)agyYeKT|ySu#TWc+ zFsV4g=$VLk%_2%DB8-JTPZWvF;t+Qyw%&l^kz8q!9FS{o7yHqfOD0-1)Sx`}nAp<{ zZ7gnSbmSDi{6c{vR^fa?`Sil9G=7Htl#;LXAX*4WPDwV+&`<4*kM$%}+y68CLpAq5 zOwdsG(0`bqs;k5{(k$F$EXM-a_`_B*ulr}O#}5V%pb8>^rZOZyiAJy0p+aL+sxa~6 zAe-5MenXa#E3wisxXzIds2~V~tu-}>cIPz`+W|q5Ck)i6+h>gkF6iIB0Gz6-?mPju zpN5g+8{Lp|#bmMNE$1JQx`VLuyP)ZWg?#RYqm*QcJdCes5U1^4JQc_kbZ|!#_g$}V zME{!i5N3pXk7UrXUp^AXTF(LYJ)kSE3#Z(h}96@}{vkcC@FjUP~cp&o;G zY@ROyo?qX$7ML3A&}{DkB&Z_gb=16iUt0irnr`EIU|Xqjr(wJx{NsQ7_soI|jx3nS zCw}vj=$EfoM!Q0doPe!_1PjT9#hb>H{>Qx+^FQ5t_*rd{4JXc2l$(_O4!%0lnucTG 
znA^|CWzrDlW^huPF=*Fm0Y(&09s1gG7(m>TprS!O2m`B%B-9XRSZjQjNa_}qG`I)B zwiedvggkR5T&FIHYVII^;|8s;+7%+`MnlVRk38#qrJ7e^Qi&za6TEEqfa7RVks5v! z;Y!K#L7|(ES=6RxXlyafub8TU=cM<)D>eQIz~^4>&W47mIQgbhr;8MMhVdQ-$Mh2; zTUkA0-d2hV@;XG#@%eHiI^XGKsE~r!ow(QikYY?&9;)|7W*|5RT5w)k_49iBFWX+^ z|I@Z7=hRz3@MwF&!Y_Uz*Z$tWt~Zyk*H{)xm&)!nK%f==u9{0S-6|iP%-K0mgbD34 z^M7{j@f!}#>$SW*25c(18a-oAz5{G~Xs-zpS)oLeFaO&1RKhg&2C7rFH1l*bUM5@C ziCN8j2v+4f6O?x{Wzytb=h)omKP>AK6ZJFur`;FPV!dP^mQo};s4e8P`B(sKVD@e2 zaoxVKF58tvgE$G5APU@%b zf9Px!gJw^>yfCOb0r`;RkKoFJ;ody6r_B4CJl$)qa&5_z4{1aj9JlYK@Qn|D@4r7T z{!%ZDi0BHzqtng6E``RE(ncM&zCh_!NLDm;xs)ra{pEZQ<*}gWoN3+SRo%QuX>@5F zlvFhktm=-$&}mn{$=k0l{G(8)F26Mlzr)@_kChAV6V9*%i}q1SWb(|GlK2NA@%j(E z9>mqqLOFS7xXir;EWcQ#gQeQ2Vz}aSZ{d@#D7i=qS*i*#r|yuddl+BU1@Hug)k7B>9)$(3S(am%tf-e+e`4zcwRe>96UM@ z&vdYFzc3jtp3;s^-%K^7%vsT)UwWp(=SmBmG3BvU2{mfgge)LgU zu3ickw*L`_M)Pi(p$*^0eK49naP+C|sSNWY=D4gO&BS{vR`1S88F3ofQl^>H2;nWUf^UFV{AIrR{uZGW;HyTZ9{a51I z|J=Y&&X>t2Iy=%F@5P>CXS$>}J5ovka7@XLS_P# z^xmCX(Ph8cXoms9L^%4F-h?$9`oQ`k$*%J+lf+)ecTJyN%-atUKLfE1&4}CR!XWaY zW=^4_A95;Enz7qIyD?q2_j+j+NcFQX1!-kI|ZP z{}^!{59Sy|uU!nX*&xxK^dw*zSalq9LV+OU7THFJJM@6v+}%COD`iD~`R)c#GJh>@ z17FXtP*5W}KW#wcqx6cMk>{7-`6me^v&8qCQ+*6)B$mQC4Wl^kGuA5$zl`FW+S{CCx+y|t0cISE|QWaT3cQVz7HDY_PT_X_0A1g%a%CB zBC3SrI}bZPYs`=#C_i$NNrQcl85+hANo>6jtKq!pY*@u5K%S$*NyETtaD066ry(8Y zQKH1l&4e~*EDDpMsiigm_Wy{!i#aEdM2jAT2b9BMN1q8Q^2NP8*fG222_GH$) z@s^I6UO71s*QlcpS&rnuJdm>liIx-@w}yN)EVV{U{;ZMrGaEr&``n)MFbzmNr-pI$ zz7SLw1y{RJ8xVB{Imb{TZS!W8>9mbx49}>x8fLDh<)98Xj}hsNAHR%~Fx7BTtO)gq zmciJl!=H-3VV29BDs7{*K)zc^kG}-W$Lbt+6g$Gh-BJSGbT5#q7dZW+l=dfdKLFHJ zf6l9!AHV#+2WDGEKk1mp3Tq{0N1w6O1IbB!98gwjW<(3vWhI5Dcu8sbNo59B@fFmd6-+TXFYlWU!xCm69|wO9%6_|e=|&!j&bK8gVj#gK#|z}Zpu!MP}vZBQ!{aAR+t1-V?| zLs6Dg$|!h~$)lrvx#I!;rJbdh`p3Q`=D<5y4eEf55N0}`>AgKgAq1*5?3JzQm5!Wxe11MZ zkB>asY|}SMpd=^66}Lzl0*cp^MwTo(Q1!1Er6R&F2^jYGHw)J>GL1jxoFlRhRP{-m zO+H6iboO5{s>$KNoUp6ASS0F^>MZThA%HRjp9hNNZQ&VcaBwC=mbZ-@5b!mxM8_rEeRBMOWFPnp 
z4b;`r$K(tbN%f+3zU^CkAYZK+W7`TB0}$R@HX$1b$DVme&3SGBVw7201Lm(~R4;_n zDoEDR0;M(0N=JRT%A@J^a`&W6OzU?x?k;!lu@fir$ALpO$=JM;E zoFu*pT0Ug5gxRP_-jM%v>>P`Nx&Qo6)bC6EKYDiX?b9M{PmQVnjr#fU=!^o1(@k8D zCp_5T(1x)R8nCE0=#d8HpuSbJ=zxYI-tB^rWuXm3BTKLfObj1tL^X3ut860>S-ut< zpWO>K4eed$)!i==^pJ-%(`8pP)acq6+fAVt$lj z%ynWB=Xm+K-W>fSQXL7WnfohjrwvwfvIa(H5dW%%$Jzy=Tp^<#W3n?xG572rh@MpD z(PAGl;6K+n8(@87xODYI5p+#0kUF3V3RFIxD z8qq0zOV4iq3sBpNwC0{heHc`7soV4Su;%6Y>e^tIfn$MmmFgaSW z(zMCJp;KNsUr?Or?Zlgf@Rr!~!%aga(I`#j=%&rENu~c|?M8bc{7rE5n}f)o5LY8-x$_I83Rf7gS1Ev)v01&^YIEsE7Zwe3earC3 z*D`g;`Sp?hox0z))zp1x@S*!Xdf%MTK@&(vfmc_WHoX5j*`GYNp+ZwA`NW8}?63^a zHMHAX%yjT2J7Z<9XUl!H+gM0rS78$l8DPd=75BH^I8Vo)Ga!k{=$5D<4*{f(UzqsT z)Ps|8TeuG@@+AXSi$M^0ja-*Y0QVq~mPPQ?X90mn?jH+%#9TkXLeBxazcogVkP=m% zy#DyDbVi&vY|bWH%XZI0HGzZ{I+??h&XVy86NgqQS{V|GY7&zxI+rxk^#GB!`YV)yHTSgGU=BW#| z>mPjg^G2Fjd_o8aUkol7l_U3(L1vqbN^Wxw@QP0i@%Yy9e!PA?`!OOF;eouJ~ zmHDu+XH!}6nD*#yAkqxccrOeIvtlLLk&HbfLK-7qw3W%19<<=Tv*Jc-LLI^8o=k>^ z(tTwzI~arY+HLYSVJaRxEX>19_|BW-+e|n{i>U#}i8lbP_F^VnoMe%P1M}Kf zv)ZWENuWh)90u?aA16V8+q)P&*gNCUx9oH_Flge!*6H6iN`m{$XfYZlb#t!TpwcU}zzG+zGY0(zDE6-16<<-U(F?Eymvo zO?$G`zZGf!vO9i#a|g6S-R^t86LdWzh2|XlA#Or;YPQ@O*%=kGRZGtfdGTB9k!E;x zqwRwk03U7wNVWsd(5+`JusT}z0FLeO2cv!L-y#P3_IZD$4ErORo3RY1uUWyWE*1=W zrk&KrJlbM~hNL;#&UCAlAIJjm(woLG+LGatMpBi$8deUHC!uZNJ8@=DapX5jD@w7p zC~dr)=BuFYcC1bNmU{BG^zgOaiWT-IT-)42T_@=M!LIyL?E+F7#ngxk^xzB2BegtI z?NZ9xrN>3WU9C7PI0<#)GCaav!`ko8b&$X~FX_Q_;J>RAr7!<$48s^Z-?}au)LF~4vHffMHSyVE0 zzP01(>0#?yX%Mf0R<+ghFZMPT+?!JexjF7rHb|{7LrUDJ|AyKgCAQE93(?_7=xDP5 z%*H4jmwj%wqzP^|iZxv&8xm+XDs#&ukvE0aPX*OHsWJoAQv~}3Jsq@pb8ccuJKG6u z;ZT)^n5nuA`QSpz?jOLjJ&jDtoATO{c)O-6^Xv1xgapTFaBnkFS{rfE|7;AiShy^> z!=6B5QN{)jLH6bVJjHX!P|J*#`X-`(`*6^0n`3j0Z@sue(6>75^PhI@R~`D#T8!)8 z%Ose3-wLq)<)Ob92X?I*cRaL@?rZL2F|=3SZky+#AKSH35NZ>w_H-3QZf{BzQ#_t&j;~5%wB0lq+sT?_ip6=omo3$8 zwt8Zk=FgAREMs-z;xXA(d>HFXJ-w(^o8koYL4J*K&NvKas6E^pi>i&z(h{0IFgcXh zr#pQd>HX@dUft;M#!lGDFcy%kx9hDd3_=GM$UenZuVDxLn?47o$|5j)Le=Jst=~eJ 
zLxHy=Gu+yg5Sb4HeEu@s5=wjxqTOpocw}_Oh3XG00;LzOGIgMO%{GV|^cUCw4Lrey zeFa@#gcf;C*@e)1z&6_^%VceC?4z3NI#}zqD^85HIVU_$+?DN6+VcAiTv_`Aqk8!_ zxa0q2+`zAi(pA)6D3QdnlhEKWpRge#%&v3F(RKRWK3{I@1qCBJn|7pJ2GeEdN@X%d z+L6M%^7S@~OqkJ9<&o**cGqk6e<>{!-#awf9mW z_O5DfD_^7^+Huh;oH=MrE&_~o2>+eL*5Ul`_;{w0Ps8NIyqm-g$Ek8UjWmciu!*da z`w0I43yiIDxmYw1>yo+r9FoOgVScO@0W4Jts(5hvQ1Dd9S{}3|>Vg^(>v^d;fIkEt zX^9LYmrJp{_ky_qHyk(>dPr0#o!M0Ep(EbW1w~m$w(mJH#N3fGh>GnGwo4n{$gc3% zFsbxu5GN1aVS+HT{UI~WrJ8|1XfVRX-Eg4KnhvG*m@8n13oxj_@nW@GB$*DBVNclG)!OiqwL64q zEI@1_xt3erRY%Lxm_KXr3>zygW(d-|h;*yme(aeO#>?2zrt}}GP@7o}+wu*#qL0DL z5~hc+tA(V8XK{eByUF;*v(<*PsYR$YE_1iTrwWyYvPN1Kx6K;B#_0?6`ak@|ut^@ZilP+z!ZOcYRVp`qO$@LPV;~$;}Bib)>g4Za$M-NZ&X*xWcV)))ib>nBk;h`3a6+blJUXvEBVTl z6J;_CJzkW?R$zaCM)sz*yd4Q$xw~onxpFdsA)%;kW0K} zUQ4#vK`-t58Jw=(GJ0mX@pXICHcBpP)FJ+A8gYJp#kIcOE$`VL`EHL2BkuJ(Qu!w; zfSK));~0%xO{LXxDi&+rI^s4!g**ACfTdP<%=H6E&u2BVv_pXktSPP+6}xrJ!`AlO zQ_V%OA!uWxjft#9)@D`DGe6$ZD%L-6hf(*6aMriEh!R9H^8g`ESfgU8Xokpem& zUi2lq{ggiJMMLwzW;k`^sYU6m*C~SEeb?j#S)*Q265sF_GN;29kxZYL2~2!WeQXY= zrc%@hPdia7(6s(SQxlVz_k0x_mI}J)3@>*<&d4L-%*k3>gOP_r^-qYBP_n3XE zF1l>2PNV=8qka^HGydRTl2B2>cYY*grPCKnZ|K=Ep(n<2Ea!FTF%Zxw?Xi*KCUdc% zBb%ZYvAntb^*b*<^+1MVr2@I1+O~$2;V0L+4hBs@!s~ZTeH58ZErxyIq7Kl|F_!pG zXsw!3-gMccP0OQzZtw}v4H@7KIZ+M#Wpxont_ZS=4n{f$X>n`;T!>wl407$5 z{pza57O*i3LfiP02TK8*$W<{?*vQe#3QH#?FWK8EBXFABH4_9(oZ5CNt-VG?7!O2= z2$N8)ndxYHAzt%H_(q@T7bP`0w`-_<8-t#zLz-%wGxa#nwNDol#5!H1jwV9~rLca! zsixMs5b@FXT1t)-4zI^!^FlVdTwN5lC91hL3_=;~2aA~`LE0%zY(i~g^gE(?Zz3hq zeVKvU0sRsJ%O!#C%Lu$*-Sf4M`HFiQ1Mv&&#y&(P){^7z`Ywf>h45_IGwB;qU`lNe z#IXYFxAKR?c2+ovZe@rTl#B*=gnjGhRDlMJd4eYJ-YC8yPdT}nXbbj99vs3AaP)9Q+cg4FR}=@t zkVIRfVx#-*3r!F1KZD#@vxUayYXGQ`nx4rB>-K*O8;LBC=|iea?)^@S3pY8H142jh)5%Cb$FJiaH0pT)5Fmc zP}oo&YuO49@hL~$xCU)NF!pW;kIVSEWi)$avdUFzR^$t?sXQ4{{Nb#G#Gyr>i6&0% z;qLbJbNb#t{D{UsPKaz7(`wj)Fo7(c5K}%=+?M7JB)fF`O{mLHe1y3JKe)KJ{jvA- z@jG8n*R1>;%M9NO4PdkFy#60H+tB8~|BKBQ&6!*f)I#a`CKd|NkT1Lti4Q1j#Qj^? 
z7~glr29A@vN_wcwk)&HlQX3n#WXaWA^srQnk0hBLQvKIv`^pj5(3ddDPBZFaLM{r* z3PDCE5#r=|7eLN*KLfojN&^m7*{bm z8H3$lMKqbuEb|nuuQp^&w_;;S?m|bOc?)6yk7xHYtJIx9eI82^`?dGq5E8pb5~KFE zQ3^`~1;+bC`eH5%ujd%9Yiw4OHB9b6K{d^Hb*lu005CfI%e zU2%MI(sb(Sz6iEuxStmL#8nD?86E${>JF&8<%5&nbe8zG2}K>7ls zXI|+f98PN{iyv;W9*W7qlObyRRNZN84F$6uC7b#7_~h5t%E%-{X&jsWfn;c1&$%bIvyn6>JTY@8L7Lnlb`+tfG^ildaG zGRKMRX%5z{!s+&T1#fPs9L9mtX`PLef}5I?u<2J-#xU<1{ZfBj$V|WA)r74vZ}j>jcm`rSR80 zJ^Iyz_36HJlST{V_w0)o3pl=D|B%qN+j>b3c*lisc&C|~9>cHpcST$`xwgeI#4h?= zgr%y6@Xx;54%|(YUROA#pN8*P860Eml9&2E}%lQlK>K z2%5OOom^w0YD_fOMkKQz48^|2VfQzYc#AbDc04%9&6Gs-Vf(XD5c!phtqd=(^^XqH zx7S3&3J^gtu9}ovu+Vh1oUG=DvD5aS6%7!drX6PHB20}mEY_^G@uu1b1n||1V8;1- zJPPl2jE4h@ePpjFV)k}qTynDsbP%rSo7<~QM{FRgRvR_m6p zB)hIog0k+ZQ4c)?N6SG_tg%)R+rn-~qg4T-EN8msX&A$48?vBH*rYSWmzLSI_KO2r zEh+)0&R-CO+$ghRr_@}zFJ3M>)}FWswWwdQz5PHBmohVZ8qgV42_A>QRSQWo>YjMz zwwW(XOCWagWUV)RJQrT&)wmFNrI0 zp`_6%w}X{dEqeIiT0ch`m9VPJEKVBB34AUZ%W(tvYpsO>X3aqqwzC+0IgqTYvQf1W zSm-MgTA=i+;2R;+ybY}o=+MVZj1rW8#SI5aN4G6Ew4jy#X_yZkkf65_7>(Lc{Yfg+ z;z{GOs6VMxxKgVi&8y`jRIrhxg8!|xz|-bTH!wnQ=xurml9+3*_d6hxFRR(^HNZ@m zOEwasl$xNT6S)9Vb~i0z2F+}F<$&e*t~BQ5XI*H2%cNU;Z%uqJTBT}|-Y{;+yP>Qd z%__e68vrg$nt}mD4?QSrl3-7_gu>BTT-l4}V`v6GEKSPL7C_5!rS#~xft&o)zzLYJ zO*#;LA8Z$>q8(;4*qE9nr-jpnLS>s&HTVsjr%jZ`qPL*pLt6W-O2*0`wNOlTSxm*A zbs!x*$hCiZNpDHBV(Y)8b3rvPoliftw#J2O-ifc;ia z$D9!?U-?!08rruhbn|oN&ey=dMPg1b8@+mXZ(F9z(Cf9Xs7Vh7XLD4}XcDPc4wPpM zx=gyI-b|fuXZ6sJdPjpaA(ZH+T`L{rgAK=5MTLP2P5g%*&dq9BR)$dD3coBd$r{IW zXQp>7Q~z_Vj@6+9FVR4no3%hf!{Sl5^W0RJ5E37ir*MJcDn@C76CYE@@@txSaR`2r zaAk+I98+y8?WWUlXwr>SR}=AyXt)N*U8Ae3G?%enj~!`JMkCxP%3q}ZA+^LIaoNC8 zl+{6$%M_Z(7MASj;E`j~KRu+Lwb#wtLCvM{x4S*NAKvmX@e5xn_wE_K(Q6{Gwb`){ zC3u#jWE{%Sa}v2WwKVF zz_h0hwaUh_{El^J5opT#-mi*(z{NP)TmbsTnk;oiATiA%!ayUe!zfDF!;OHw59i56 zi#b~uVj?OaqcpuLcnM@sl%SaUPe$2XrfS7rQS)y`36tI!Zr0X(`>}R#>sU1ji_+Sz z>mu8H?~}zewjGqtRx2W-i$*1AMH9bEf~~E-^(_E@I=H(HcY~)d!@6-(3<0 zL-0V=));o`T!+dVXrBaE}VytA={s$qb=qkN$_`cmoSoi3%1!RcwK#YaB@(k|8xG 
zkq?ECvC~`=u3g^sH)SQyBUtO+2y<=8wz%E{dAzXrezzeA51e8|=;tKGYr0&D!L(^1 z{EmpLWb#MKX``na-6P z9vjCU^=G!;KT^Ea>DWUOC61h9KNS(tpC_g*f%cfeJ4jz+uVmFS8llZ7QpHb8T)IGA z2M`3_h%3BGevcG?=jlcPyOc-bd6#A^pqFNZtpzA~&VbIDZb?cnXAWaEz-FrkY6LM#|MS(dM^8-F%qA5$(F<7rV!tZ4mpHg~V)@otLEkokj?-X)@h6~6@ z5}#xcdKCY{JOGit3MRA8WRXlYU^si-6KOZWjGVAa!x6=!>Sco_1Af6X4w}Vu^r!n1 zeY_UAQ%pz0=q#oxnUoYIQm=H}^|krV&0$M$t1s_OL)04RbF~VSVlF|Eqc=azchl)x z#i=y9(ra9|QLVN8{T=LX(KxAKiS${b{0@;s>R&P$8ApC1XVbf}dnFOSbh+7nWw5*O z3jZ|1MP^33b0L#-ZvvO4V#Dwqaapu@FGEk}o;ZqgKfbh`K&feibB7jfDxChuGD3Kg z2V5$e9EHJbpR;0JoQ@rB{gX2G+VNLa-?1=p9|WMMbxr@7u0no`+_}jghEZeLo)I9j zvW~J8ZL9H=VqBSFMA;mRiWVl;-kI>#=j60$pV7?FE%S$r%A%2Fa$aENpKWrR4O&T9 z4qI%ife%_*)q z`i+O#h`_JYi9VRHukX@!A3PS!V-Ge<{3lK*P9m?Bv$2N zYCHEcm%SpY7gR!gJqoHzK9O9BUvHHTKLHD)4#!*~xk*&JQ(ar(A#5JyLLR%gGL*#- z##EUL(xs8(@0PbOkT4Fo=zcY4{cy|l^gD>{bXQ@MQAiLco$Uda}|0IC1i!k=usy_7@U=;Fs5iJE ziKi)c81*flb94ZpLsaARLNxB8vOeg@tOGq!=xu3U{DfLmdjy9@YH!3#<|xSio8`nF zsfyyMX@*l;SGY30J8*n?DdjCX(}Ts?%&hlworlYUIcQ=*RXUclwoy30yWkA~zc)Og zG2O|t^p+zXhMKOjGP7>xF_|qkye2a8+CHuGC(T9HMk8MG>U!&e8<^jQ<1-kGY3S=b z%t%>_C)M04)P~cGpefF~27B7g@i-BS?+7M3h~x7{MUX4Kr7drYwbH-q z|0Bkz|C3zU6LXj91!YfH1TK;8=_h1tI)FuG2uOK?fiBnKh?O| zt6QJbSw3WJQKuiU9;Q`{m;K8tuPDnw*M(HjU)trqV-iBIGo_RFuuXiGVbmy|!HQ7q zc)10ys@jnUNeygJd2|~C9(Zm3DU7OO*9S>8Ilt)zV*bqD`$v|Cr)^Yi-f5N?r_v`q zO`_6SDX&C(|3h`kjgJ24m+_*BQaI|RWc@6zqUM4r2Ql?tTG_s99~wnzrQn}6MMr4~ zt>swi+DE|eRJMvb(yJddiIh*~Ez-)ny(+%wMtZPb3r*3tvW>D;SMgMplA72AYzx7Q z|B4TZ{mmv&#G=5-Pq*tt0w#pwX{0hr+udEeTb8Og)lG@kQux@;#h(`6qrW^91o+#L zfk^en5X??;tlol+g@!hRdCphPg($S=945w)bGV_f@lkS5RA9LYdt_V_#tqvNQDqIX z+{P2~$`K#NLuCOP#osJlN2oXWlQPzOk4l9ZU;2x|!7?!EQ0K8Dm^oHbuTxk%><`&D zM5Odbm$8XjaIBY7pKQzoJ&fDY?@V*&W&p4@=`A&(XzT$@eNq5+NpdM@-p-dZTWk#} zIfIje={0LjzA13~?=;cVNePddD{i(!DYhiMSt??8IEyv3B~~!YC|f@zMz6}M61=DkcYUEJ=AHy7Ycp@T_V1voO;h+^8W>fKzY9_ z&r~0*xw}~kxLi!C3GC*dcVSu})Rj0r!sR@8_erM8n>xkXyDI8H+w*47$LX(6pFS0d zWEnJqbuOAngj#A*N)aWnanG}}SnNaq4hAuNvrq@upD 
zTFFVpN=~X(!iM!Y(^rlKijzI1vv~Mo#*-}LNy&`a?N_Yg|5rCFtGGpyhjxVqIr25p zp4={2xk;9+bG1r#Nw3f*SH=cfRF#aZx#nt%?0;XOIj)RzwMh2+uh1e_#<}M!4=GS_ z%4`|uoHZrRL8Xav=AB;xFdJ+|<~QY*t0bud8)R?o^3w5F@*@JLO{j&=W3rJLP>?z73OY&=V~qc1$moUZELivAa65k zZjE*oq4z|?iLOJR~7d}2B^$|6ef12eqci&tYkD=v0fw?wloJiujDciCTqRZk!Z zAto$X^=|a=um;K_L|PFc$z;8sPG&b# z2*ub+jEhFsbQEZeHEjT#IG1Wi-6l{qj-r~O(5;~yx`ooD1>FK4>G;_KY=Mulkt)nu zD(ok{9+^YjbtyO%Q%zE2ZK;IHhtJYrGm3o(C!=)l4PzX#^8JH6h(~uyTtCMgm5o#_ zer)J7|>bR`eKw!F-i+QR-_ibw4$|)Rh9s1!N(5P zf=@lbmXfnPXbV2-rEk)&Le)SQU(W2VbzhOWUICa}0e}b+t`)y%$y67EArbm9+=!P= zvIW|Nqo5qKRl;|i!V5k6$_!aYW|6Te;jeZ$I+kc+mEBX81gj#@nfe$ue+yLqSD*c z4|q$u?C*ilif;b(fLjA@rn7Khymv+IyKAtdzR)0{tNY&>i*9^^X8dMKH%JpMP)h! z;Sw+aO`>nnB#o~JENZ7?z|CU={}c{zHop1!j+ta{X4B~)x{cF&PPdRA-VA4?_(2K! z#0r2s;fs|B;boWpDM4#$v!!Axlb2->3drjr9(^4|GIc>YNIXR4i3b_&6_Acv|FTF& zt+y)aP;qM(b+8U5V@=qF*WrMy6E(zIN$FLBbcANhrPLrCeFzGuhOPl-k%TKjNs19F zULLs!l~$t_P!{sQzU1#T?1!K}m}o~>_yo$~(j1>#%ENS2uGd&MX`yfF&$q0|qdzTm zl_f|^_O9i52?}-&rn>4{5?3bmGBa2}`r3-B(SN?!2nj4TLk6}6m0}rZ2bRS`&PG&Y zDSosZ5luOZb8pp$Y72e3OA6j{f9d%Q6Q~-{oIQkiXnp4&jO0( ztC^XcHBry2s3z;>hAoRywv56c zZ5RGw)KF2}sO=X@s!36n5iye$REKK~TfSA2(pRZc)TFjyxg@35UX898If`-JlN<~zr9Ye2+>=%k*e1{fMd0i80UKX8*b=IPe0EJ!pdiIcR#qgbM8zJI7g9Cp%^|erCBRzJOIq#}D%GrPIhj_ovRN9qWRjgvw%sJd zA<@aNQaQM zTV`TlDeD{-o9bFJ706>&hXBCKLE{^N0#vP-r+1wm>(p_?vyo@~tAk{Ck}m&Jo{HrD zXeHJqDB~~{Qk1Uu@^Y$czQ%?uncS4YEkkbt!BLzR!WMbRRQ4<$-6om0PW}i{@#FXg zt|S#6v*1SOn2C0~2=ZsX=kp-gb=_GCuY~ySCBivjy80oVl0=bN^CNc4N>ucqes}3vaf?ngqgt&>7~z z%mhx0r_nHrK53iy1xOb|q%8x**PH#kvjsDsRbiUPheaSA}pzrl+GOW)I`%jLQ8F&_dgw;-*nIDhQRAqD{w|22A)ypE-gsEec7!BfBU zM)2j}N*KuP$XF#A9*U1wuE4P*S3;3HoHQ*37=0tqRfuO~K=dFBT5OJYxvysEu-Xv4 zNWbnB620ax!o6B3!DC`I{4;y#ywaYMF_RPGtnSI3>axH4#u~~y0ygS>fKDJx6!hMl z{r2k9A0K;X@7{cPfBx=c@8j9q4}ZM+c*b{jpWdDS{p{lMtoP>q+gIoBw2b_{0pjm; zb1*n!X2FWhIZApuhI-Zix zFVCOjU-D7^9v?qD{Br>Pt(Yzdr`TYW zOEioJ-1rh=V>`*@?r1<0rbmCAzdn0+d3Hz|j9EU7281sI2+$tGI(7X>xRJEyCcGxC z9E1-6(e}c-NkraF6KLNpENRtVT2eXPlC%Rk{W;-EBOr}9KaPk;CERPJ>Xk} 
zeu2R_nhayy>fk+oq?AECjHlFCr+11}GoMuQpJjan`A^lpqvq=XA^-Bl3!D7M?~?pK zK6&}<IH4j7yN{C+rz z22q$shhzGOCH&#H3@iAQlKtE&oqmJb!Vb$p7P~FP?h&zl;waYwiGY zgZU$U%*6089K+FlLPQ%2GL;@(e0ZILtOIVpNhUMb5g|>A4bcbpci}d@)fzk{-xp|vKhp65LHQ0W=`ne}<&B|%^8Z}?! ze!8`)&is69)LnDZg=5N>aRD~^5sBvYOX!kN|60Jb=R9~@nVu{ zwf+{5`fu=Zj=JM*xZk5%6Mm1Tuf#A*)vA|oUaDO498@mf8djs`dgCqJ4pzC&deoIg z@vm3k*|)m$DtbWIsyg?Yt6zQYomIIF9~IZVI*rhL#HzL853*;>X4j@203({X8{Stn)IOej3-4EaGKj)hhjjx3bJ3 zy|qi0>eXLQhRatf)~UJPc#C)0)ULI@JhNP-Q?(^+}0SzYp~Lz$$h7ePCgwejn)f zfo<6bGH*|R2H!_mlV+{{8XOrlQje@ZaOd4uo~-&s9f(|IgFo;}?qm*Do)RfARS5Wqh6lZ^Oyg zXhI?1;0*HN>_hP4mt*i1`FJ0(U@(V36^B3-!I)yO!DlB8;?W=;MFG%A+SyPR+2~IJ z|IZux13zy5KmO&}(`O3*zkK%0^Z!yl-v7`0|1HA*?{<<51A{Y9K+V*F=Jt>_KhUFr z#&hJG_`gC#I(p{ONBfCJm1PQ`hV(fKpnKhV1c^Y1)&1ih6J?44tI_=JL*KT?3 zM%Qj{^`>$oWX(~QvR7|5&tAXc>UHAkQ*&Z5uQS`-B$CX+{MR^rM2 zMjxWdN=iMX0Wf>B$sn6(`smYSAg9duER&GRdALa^>$%jE)pEI{W2$ml6tjQLOokOh z&z0vWNT`}@&5fd$6K5(C1id7a_m8-L7ZoH0dbPA!PKj$`)c-xcbtQWpPWt~KTJm|{ znM%YZl<1Lkwp}KD91fDZ;uN&|u5|g4V~J|TgPY39i>H94btU=Vrt8;T8R%h~D}`jU zH}xvJ0b80}E(=TLm=ResJR2tejGgIZph20Ei_63myvWNLQM@OQ&J5BiS(!E{84UBR z=HRKNL@DeH@FozlH85=#y^)W7WF3Y<-op)tK zIcKRXrlUA}m1#81HKzQi)|5em8VEg9pbV9=D|w|%c*Ux){g+-o;HsY)OsEpTHlsiF zdf{Mzb{`t4eAjG<***AaS5YJABT-$*pHOoQ{?1Gb@-Hwn;FDa9$bSxbjj9C!9_AVa z{!)!i*+bEOgAc7j&-ym+mT@bOFU%UO_(~K|`EzB}%3o+L%NIlVX;FT~S89dEWio%E zR;bz@@ENK{`8PVv_*<~rWWU96&suI;S>d@&n8Q1hp@%&;4uPhgv4@(d6rZd7D8CaQ znfVuA!Y&HFi2Ee?ZQer|-{jU&33j-nl??wq3Y%A)7+8lT#-1GpK z)ZO&hub;r`fW=RAy8G?aOy1R1lS#|*=(edJ2qTh6L$g*{Z&cM%b z2`Rk<*O+jRHi4;Q?DoFN&dv@1Gl!T2=dT}qiv|gqC5+8WXY!H7=q1IH9TPA`h}m;l z9$-x!dM7C_N+79F%Dw~dV~M`4GY-UZ;*rmWu-Im z0p05aulUnhO3EBOY8hESVHl?Il!yiBP&lG}>PX4ch;Dg6Yg4PSQj}qG|-!nWQD+SR}i9B8@_0 z3AgcgGI{qAti?ps-3HeQSqepZOJ~>gN1Pf<$Z8~O16lThY50{$Ln1s9LJL3;o#6bI z{1~9nFc!hf7t4yLc}dtNC@Sq}`LpH~3{#d)m{B%7dZEZ*y>$8r<<2P(I_FT>C6wi8 z9@e;<;*t~Wf!vM0hhU#2%l&`NfYq*(2`SB307-olPi|%dqHNM+`?fv-WT9ZBK3)95 z2MP1wMZ>NU1DY0pQ;x{H%{bz0G#eVK)mZ{Qe}`*J4J-kl^S7^lKkHq7{LeqmdY|51 
zemHx5{@eN4n_%y$rC0EImyne#y_v*#qiGO+3*!OEAbp^+DuTUZM}e2(w3thI_4ijF zUtI)yCyruX%M+&;q>gUZJi`cdgQw=S-AoeFRFXw1X*>w0a9wbEfI==M>*Y7H_z~@3 zGD#lT8c7;8y@$=0%qwIw1akXkHX)6i4q(d}j9}9SGJFt{jVNv?#?V%>nn|KvD(QBR zgwy>_dF_BJtZr#8E1qoVJHc;wJ2EJ4s42tRI~&6W28M~eG?q6c`=9_WCWgEyMX#ZI z^!F4mbSH(8F(TW*tKa0O#sbN#v8_LxUmBCZvK#z&*j%S%cb#uWI((ay438d?$yYqI zM$^&}ZO>q>xOKeeP~ zgbl{2x~Kn~@4@BXXHB|@#&}%gY}(ti%S*C~mR)n;!>F%oX+VK6&G$AXc29_q0b@VL(_Y`<|pllbwkU}IaFY6(h4ak;{tf4f349`op z07IfzWkr#K2?va{Ch_-#u&#-OO+oLG61_{Nxj=%wr649yBvg=-%!h_(dr_h8OB2mf zYIM7X&1_0mhez_8tuPFhW-4Gv7ILz0iH16}lKO83@y%D)ZbBrF&im18NbhJW5gJ$` zn-Xej1Ct3O1g#w!hl&HUBtGTia2iXUcZ$;xmYWwuQ@eFM<2{%$=hDC z?mTlo3EWPkOG;OvOt;%DB{qw@byB@syalE{iMFL`{mR8`Q(b#jZ^mBPdj}?}LhiJ4 zrmns3|0H7IrZb|6?xiSb_e3Lghnt-DYoCIbI{!nJs=f=j`~2_4i|5a@`(K|v_x|5Y z`S|lcfBv^^=YNtXnUsf%R-W<}#CUHQreAyG>l3X$SJr7v7M5u^92;{kTOEg*y>j4YvouDySoYS4|IPd40!+J8N&S#cUonC8w3Y0)saJ$# z3hB^X^pK70!66<2lQ}6`fHTIy4t1B|F>6GZhcpj$iX|V?(Z$E#LXD@CX{%dF< zp3JcdXz{@X7xfSKWI`~?E(j-iqfLo{%p!(7Ln{;?V0(j=AwjX5pqQSe(n)bj3LHc| zAcfv_GVS$;!QOR}4AkMpGm>!73I1^(VL5pt@KFBE+nlhZoSd>4q0#z+`x%iac`K;D z2N?)s-6f`$!cIxq2o)753O?C<^R{Qa0KAmdjoW!Va+HRW?e}{&`v1rWVmIY!krmYG zy>iZPHG5__j*lkMZ8U*Z>Ifo+9}(@Du@#NhteXhWZ&3*%!xzQh=&Bq|CP_9NWGr9( zadGzQ&42dJ{?GHvkC%j1pyg5_c_%D{d3D7G0S?Qrt`36fPg)R&PTveiqy%MosbnJ6<{9fPVjyR2lM&p z$fZ)6RM-&-thcY?n@2L0$ZoDHT2S4?(Z~7Do^JYMIwWkOFV8W)N+VH8Q1^ zeq1?dKN;naIVL)&S`ID+ARV1d*$MteXq>d=fW}{%Xs0_&cev7udBgi0e3(Sg6*cFF zNpd#{hvlW{n3e-VQZon|h65mGY#>bYtsVj5m}CqAnSp>2j1%GEn4u1X43Z_GEQ83G zL|7M9O@%Lq%zLt=(jqOQu2mRy8J;>|9-5S`wPQP@D%Fq37J87DY}gx`T4@n`%8ncb zePnbqm|?>Nc!w+ti7GN7JD*f-mK^lM(Kyk;&7cqP6_u68M(8#XRgaM@qUdm@HZ6o* zw}AvxRzb@a2>`k=*+z=Lx;kgOUs9N&DlyKaIwi^_ve->noUBnUqcYxv`eJ3JNb`-# zb(HV60mH?Q;77aD;9H!=VA&w0;!Z^>gNWF8zz|2+Sm*=WW#yy7E~bFZZ#G+epWBhP z#GQ})mv!KTg%-Gvvwl8sK-$;^+-++|SWI6Dvpm2j7KS}wRwwz$5`+XU4)Lg*gwPWm zpokF!1sPpv9Hm4!v6I`)M+J^^AMWFudvjMfceAZtEb|k?SR?^hfXHGKG2}&j7n6}? 
z$^fAts7>j`dt76Z@oXSg!$ykg%}m2*dpDU(uC9K+c>n1`@AcnaeI#$mkX&6|et7lz zj3|>?!mvM#M?KgD5Iw8RCPP;MgS|Iz_f5(S+6nXh^8vJ3*&hkRC}l-8LtE>xv0$PL z$$ZHQ{44PlU6fd-C@hmqJW)DB)^yXUgW6m$0A5G;;oufHp4tmy?Qk}p$P2kVgC7%q zE=$|a+jIcCjY(b=s1#DhTQVfGNhFUn5f{S5p|d^29e`!9#A-pk^DkL?E2m#F{7*{t z4O)#X!e|k>LczMI>s+JsA(|W=AOGd$@zay18j?h^hp5z!^i4NSvVlS+a*d>E^4TOd z?MSty`qRZZEM201psq7Wt1wqo|PCB70txEsP{kxurYZ z;F-<<((cL53i9H^nZmJ~(v0a2`u-c(@6O---n;zx>SJwEr^TGT;r+cdQxzEP`dm*& zvBebuu5PeRjgEf$2UB$?aNY?AC8(%|JR{pE0G&;uvB3{29v#xM517{aHHyY5Q&uMI z=p``N4RO*a@`A6;nV!ljH?Wp86ed!ZhJuO#<`0)~oXJ~;sa4eGG)>0w4Oy_^5AF}M z?!aj(t`k}T7{n5-Nq4Ar%5DGj&M=eD4b6X}tRzI+P4^|1WJ1j(xbCV57Z;~Gh*xA< zy-y;iYV!rW>Kc%4@Lc0%W?FE%Bvvv%h$e@u6dOcQ%x6I-=DO@FDY>j}no!%B6A{gJ z9pxG50mjV?kKQRc>S(#!+31GW9gJs_aRSf_GRI&wh;QO?IGU;(Rh(hPD*ocB(JGGQ z1Qtwm?JDlPc=m#$3;Sv!U)hCm zg!szGWc~_HB-|JLp@c8*ot&h95#dIEG&OIa=Gif<>?%AmivjiR+Cw^~-=C|qhRkq>ZkSB}6ZrUUk?X*hu}wrbX_XxHF}eB}S~ zmuJs^`O8bA|HH$5f(tN+1UN#-J97~HGvW3i`Wx7anbeIdZnm(9*_fS$ic#hw@;3Y$ z@$(pc|HE1mGOJ-2lpcgqr>gk%kd}GK%b@3|S`|5%fhSCl{^VZ)(xBz|n6QJ$h@U*M z=I71j;HX0P644yPEPjd96jp<1G7uMBiR10+Dtrw5u28)d@L}`VeD(S@S$xZ?sF*{n z5l+xXUq{Kq2o|1^4|t$(Ji^@Z4RbD441qfL6%-?uo*P6@a|2V2dXAqe6=j7< zwP`S@(*RBh+!g-y{rf+(3ZF}3#K1+hOM4-fCPIzi* z!l82QDL?*2EKxX+Jq;`eur&w#r9R-63d9B}XMgp(`4_2F`C#f)**w+j;mI?-hjO_( zQOzcCeL--N370G^l!oATpp+*x1*o2M+R;Bgvx7OH#MDqyv`f=)#`L^^Zi*D-AobLi zPKxpPJJpXJucQ0~j-AK}=T#kUb(j3#WxaMGKuQLbbb}*h4Z{YMbmOq3GTcB(6F4$Q zvRz$`=-2}QI8Ecb5%nmc8tLR4;-UaA1?{?%3dsU6WN;UT9gK-aNhXN8EeKX=Mr*ye zZ#?i6tYNbeSa*WB4~Rih>Ll}nmA5stK*%v6w37^jPoK`;Kv&4LCc`0wmrxIYXrMEk zGXbW`*?{DpMAO-1WI0yNpcKrtD8wx6a1^#t7JTrC?5PblgyPmve;0<$lk#R~@(d1; z)0_^qR7Q<<5NZU9&yR|8nZ(s>t1ew2=XD_{d74n^m(#->;!l-+nukg`;RmCk zxzUr=Wq1*dYTP5=z&H?VJ!F!w(Gj6n~j7RTp@ssNOzB)J{4hJv_wr%ip|B+VL(XMryVXsLE?n$ z9Uc=vbEGmLImG_jO@TNQ2s45KUcf9nx}!i^EK&&<5Z#fKL^@;HDerTL@V9q=fAz=t zo8GI7-#@)Qdj}_)0ySsTF-^WkMgzPkZpL`b1t|~sg3za&Cv!1c$C6n}sU?Gz4oD#% zCJBVc(`6I{(i=uoir63>n8XoVZqnIxiau&nY=F{tJX{+CmL{a`fe5#b2T&3n>hvDN 
zHQ=pfQdUm8R6dPq%kUsd=qYJ3$a_sv4lQrIL)XM?F=6JSj% z7-`a7vOv*rY9CG{ z`QU?&L*_0Xn-HJ$TK0p?%6WD5{sLM>Mv`LLq%6OQ0qtae0H}-|c+xRY+yYKdHS4P2 zpw$g_urFGSv`jgKB>6DyfN!hxPCo2cLS&qe=(LE9oJ9Ymq@CI0Q2U{)t1Qlnq))b% zW-G!e0B_w3-1)nY&rWPB>3&`ua&H3*Cj3ijxB)AKY4xS&_s)2<*&#m#wuxeu>vjbj zkV=7)uw`n5Bxtg*05k2IRS!T@(zM%nl1^pUi?IYU7(r+!_kzRa8$yagDugNPL%xsj zlTeYmNQ~4NTPVRvbGZP9MHpR}4rtIaq~%eft}dxUng79pmr+zkKOzN&57>jfG#cEB zXN7dpg{OjoXo|=|HN3)z8%?N=%6K=ZKPqm@<+Mkxm~hX?qP?@-4VQ0^?GCvIx$o6P z_Ttffa;rJ-X2j8ueVGrb&Aq$a*uvqOg5#zu)peNWd(~{0!w*`UAAJtHy(QgIS|k+% zD~5I85iWJ~C;a&Vj+GQSy@=pU*f=}O+SYjK^+jH{~L3MC%Gk7Pm=kwXmL1I6j5g1q}&_oG4ILbMbR zb%}hrMeUT+s*X*{CV`Ivd+;B3P!zP1TG1vdWt-B)sn80>gSwL@<0wbDB~wGt(irm(n12v>7A;GmUmA`N_6J7GPZS-htHxM%}QepuJTV7qr^kB6tn< zSXw`ZLIEb2ljc(QHGw#z(v~tc|4pa`&Xoi8IDcNhmU#Lb#Zf>F|tI!)J2Bm7I-i3A@39;37w2|rqheVp)$|9+HRc)CFatnJYg0%t{W-z z#wq4L(^FWZv^l}fs@$#^=%5qSrRO1h)SBXET*{C8odRDvm=p;yMvXM+xJXZR_23P> zLDV1uAOjR%!7dN_&RxO*tgWH5liORPavW%3VhSL&FMs*v`7gg3ft_R` zuc+ID!2byRNSQ}QF4n070&B9?q7pFQP;Aigug{HvtN#(=(43Mbbc)yBfOGVK=)~W+ z!|Gtb=t5XIJ6b#o+bj@m&^G6qxV4gia^&CPAT4Q@#kE;$ox#ntVXqN$Y$uc-fawMV7Tgt(1juoZ7Jdm6X+yeqIPo|KeXLxWD(L{Gm+Hp*9 z0|canFgXp3RD=6*7X8pNGHH2QdG_r2)0clavCv9}JTPje+`~`9qh1M3%!iAH-RUvt zXOGZUk77TGVpFn&gs3`%Yh_tmGBa880l>#tzjQVt@OosoG^F>w0{*Rhb7_ZR%T+bc zLv($gBwx$tLnpW%XGjn^_1OTFmb{LfsBr2vc>t7vi>jenHqOv^LRwwn$qEny!gldA zux$@(0=M)A*eX!X1bXmEtC@hkXE*S{?esZ#XhHU35WF;;e><$f_F$ZaqsA?9FnTPG z441fhj8|N$ZnNFwf`(QL+S1~|EjWnd2^KQEwM<(d%>yhA+;W7@vO=n_)u&wiceusA zcSEnSuO0{99sm95$;mG-mH6++FJAcjpO^CS@!x&?chR7>-tpfT5>*|9-)k!SjIqOo z_l_f_D-@#mfCz93bkK=Ps4UFU_m@lv@Asbhh^OFGcJB%j4%K^7?=L^5w~iU;mf!@$0`||NZ*! 
zTK_+#**$I2x)0AC>ph};)Eh(45rF$d#|pb?^joOAe2N5qnTz1yIp#gkol!ipFnyJIQPXSv@Ift=rh#$<+D7q9cb&9 zk{G^D_JoW!i>r4X(Gb8mTQbNb!%=ZB4YUA`$5N`#*U8k_D7-bL^VjzuznEgw0}k#p z4Pl8bauo5mPj6qn`yyMeWfUTJI%8CsyW)igyh{Nt5DNd$FhnuP(11DAPk{oOuz31u z57GhP1SfKY6zn{M_hl#JLQ!fttb+_~Nc{rEwiOMUj(%A*nlc|kj7l6_zWHAYw9?pS z%BCD1*zeiY3WPG=)F@+iSl7nD;i1@h@bP7#Vw{C}`wGBlyy3?uk+hOuuc z;TXyZwQ{nqA`qO7yovUhqq2cHNWIAjZ{n5XM2Xjmn@ZG9K)LU2wq9gFUMX-+(RDal z@L@LPvP*GRT0$scWO3{Lq`C46aB5Pd^O6!;vT(&v(bZC!fKyw6*liGe+2a__HiMxIs`=7AmO0 zs5s5UXpvm4GA5~Co)n~GzQ8>%3Ig|t%uzes#!GLi*~m}%`+s{xWf@KDn1S5(pXV>0 zk?+d+&&e}?|L;;h-u~n5KbvF!DR=)b3bNifd>kZUzm=Exo6iE+(_vw?=dnDIj0iY=<1F46Jc8jladyNAU*3{3Z^DVEvdtNQ%h_qm8hjv#@EP z43Cx2OYX$o1!|ww>|L~^?Cw1wiY(pEL!Cro-O8VlCtLo`WEP>lkUn^NJCzj#g|bUQF;;86cb8 z{V9a&;K8k~lc_X9cqUEyeXT{yVdG`K0hTPUVzX3Z)>Q_tZ!YjmhnETBrZ{NI&%wLs zfr}zS=&aJLLb`nLTQSJ8#v+($=F6X%yKBEnVGP`FDC`Zhic~0!8Bz>|z2($8%KMs@ zDH5mqwYA4txih)zdc5nBReiFKM^;r|9+=uyz2T!(k8V-m-^b&INf(PXQ0`&C=#H~n8}07G=_Dz^^ksbiLPuIX}f8<@HzbRq|Kjt2ZM@k5zMustYTH>hb@t&h=t2r9$T$89!9VkD9z^ZJCW*Im z@Nr=CP;DI8uvQp{k*c=LHyAnfG&&fJbE^#ofAl zmLHa(NpZFhjV$CUs@Dq{t6Hq|S3ykG7%O!%(C~PDS`44j@v6mbe-)O+8snwzXc`_b z%X(dRyy(GHHM&CUW_1BAEw?oUl3`(Qa0u!!7!E`u?3L2Xzf8W9m4;?9EX6k&m1Q;0 zSX-;fwl{UwFY`{h=`pD)lX=GZG@Ql;rg1$d8F@a&Cf zKAELq?#0PrVOh+$)n&KFw4eo!-QomyJ#|PLyCyWE1&&?AdJ(siROHhOmaXQ3msw}3 zOE0q>tl6=sMn8Nk=8)$NjYq?>JR6TXlAMo919CjG{>{lv4g=&O#;YC`H6();Hf9aV zpQgj1z`yzNeZ!a78E$whafe-g%HI$G*lu(^mC;HzLu<{LDMFwmoX_qqt7lgqms0I8 zbxPNHES*yIo0^?^HgBiYc9=S)ivdfgRHm=lsps={N^OU!Q??bs(lz5EP%~oJFZ1@# z*cf{!OB1D0^RC&+myL9?40+ntDpx0)q12OK8|kErTy0iAwR1*GUrqjdQM*^hR_NU+ zOO|%4UCsIW%X~d6L6QywoKBB6quczK)K;cOTT?8(D8Tj2!x?j0lj%H^Y};w&y-S=E808fzxkRsBD;jTx$CbNd5g63 z?r8bGXv6HXKI=C~tJi?ad8^$Uod=E2uA)w}OQEH#{2Fwrad<%(-Z3rTD=pF*T{RdBy$ulh zh3SKPpt>PNUeW$>Ya1!Bda8fZs zJ%&g<;G3QINjj~OO4+M)p0_W0cS+^IZD+Z=7dB0*lr^+^-SUCZKJBR-oN2)C3^rlL zsY5EAt}9^~(tO~Upq8HYlN-DViQeebpi6?V6hFxu)%orG?2k8>ot%yZCs+RA0(|h8 zM@hob7p_2`hRpR@9dCSt$1r3p(G3soKc-%lgnveT{?2rXo|ySGILka~v-1`zX{mQ3 
z5hIvhDuzv#wv=brG2>J+S%;3vd>K@WnT?@_lV5{=_(*uL7uR}I(o#AoR?wWeI8T456uTrkJuK z-^tBCM3!Zek`V%k1~$vN@FJNB)sW706;9yd45{N5)eTjGU0wY4`taGaXMf3A0wOX@ zZf_2wehi`zqB4-5@P9~$WK5bSm6Fpdx`)d2#Uc&nBcHWgZpqcFcW*=)tcT#sq$0gX z6K>@K`V%jk$7|1uojddPb$3plo;*8z`s?AzvyaDrIX!uL`s~@iKYe=o^yyAOTBOZC z>xP!^`1HkdmT!U8@8tp|J>4mXZrusqUl_^QU5xI|v9qt9NZY1?-9OjZL>M~I#5~j* zOcPPm8cn%5=j_TpaTq7jKdCZy{V4ik(>se4MlMjfvn?UrQ8k1G$o41Y!j6m9Mmb=X zP~4ms&r4_dR4~;q?AO8_aGg6~D(-=Wh)-#-r$k}-T!1OZc7D-=J>X@Ll*U*@LkRJ6 zKS@T(Ai2YkiOHm|sD^vVpdayNoeb_Io$lw0EZIANwZ^rrd!3N=bu^<&7)^zNE#da} z2sBLyJd%xwnDhLW2PI+v7zl7Mh2<769LBDUj*o>U2adAG2VPn2zWOosbk?otgNQag#lDabb4Mf-T(dCx> zDHZ>X-fotRdSo*ng4MQK6u5%;|IeSjJXYerojgB&>f^sH}6aQ@y0omR# z;rEuwK2yXrHH}p_J0HO6&MwR$mOp0abfgTqTyu%9-n>137wkPVl=X_s?>rtiy#%(n&<`JaOjYci{huGs zFa9&wdv2^D3kl83gcw~y_i6OBShMMYpo%A!aqj*7{8f*%A=rCSu2PX=5{R~U9}aHG znxN(7VZW`jl2`r90;(2f)*L?DPPt+nZBCA2ByEwhAfk5GWTcO7@GO6#Pi{AOt`?pR zL^pV0sziAI%TH`)BZ!<+iSKI!xE6LK7~ong&Tky%`;^lEWl%)J_kW%|gLg{&=U<*a z^ZNfXK3@O#`v2zW|K;L8=g?5N-RAD5mp>?70#iv@cKlT`FF&4UnfcF>xtZ>US?OWn zLN+~i@VgjO+K#d|r&95{=2S*dp(AO9Qr*oodbAmmxGOYzTN#p+=%(H0$Cl1juh9`j zOHf#y4Yq4a<7$o*LuyTEDN)*u+lkk@FUJ&RVz``T*pSMxTO3<-K_Zn3X<)d#S8Xjk z&(0l$FsHJ-6UwaVf})=m*eX!Zwd~TP(llA4r&OOR#nXO4Gi7LScA1!ZS0YhPr;!4& zU{UGe^od`&sNT)o^7&T9OO^R{gb}Jf;Y#?R1x>iA!Rs_#MhRVE&I-c!RG&6em;7k zhHfne_boT)wrJ1hxu%=+;+Uycby_*e`bjAERM`3Ra++PErJ^m9(3B=M$4r#cq2`zg ztWjfVCyb4*659n2h)p`BH08yC2#n=> zeZ$itGH6&|mI5(Xq=)jFGOeZBqG((PigaY&9n<`o*Fm#gZXt`H%LHhi=Vq1Sx#zl+ zI$V3M>nOrC=X*K&ugjimp6_Ng-?`_zl+IgwzUyeaHRpRdO0LT|uUvrHjA|vuL@zVH zwlMKlkbYr=cZW*F8Yj&ZyPt+1FVQlZTtkIojpN?1R5iq=uQU&3r7CsJtn4dMU7|7{ zP{XWQ{Q{ODOI@c+`3&0*k1B5-mDsvvrqmH**fgQpIc|B&N`B*o?@d9Q;Z(`eLq|r@;8aKEqyOiBVjlWef@5;agg2>OHZtiY^vk&>y}E`u&%`f7$t=J=v<#|R3jfJB9hH%v^y9>T z9v`1P{pF>C|9txFrN@6R<>T?69{;(?_|I_C*D#du5FwP|O%J}8gkG{ph5*T#r3`{8 zJu6#IHU?!gOs9lcA(9ZUHG5F9gqvhEjlNH3;h-1x`=u+^{C-*e+ViroSq{XJFQ?e8Hp2YZ_qzSg_-CJqP59qqRMTxtjr zQ~w}h{k(4mykZ^J&4S=i3ag|Lrc$ztRf{od2!JUcUGk4Lq9`{CAN|R`{ZQ|p`3J_? 
zxVp;1RKj`*HJ!haBbnv_FgX}sEQVwnClEM`oi_^g(R*#uNAh-f2muc7AT9|ho~?^g zIoYYF7#T`Rk@fLa{>l6K*Zk8FzGu*2{yC=+_|gd6gKvn2N@M_O68cYK&^9DXypM;T zL-J5=QYo2IMY2xwxL%@{B9a3SAb1Ut0>^{!hOE3wr^Erv6fx>1%cGzvIc|u)F`B+& z1IkG9yw6W|YY!nU_mB?CAw6=&!lW|#MBc;$7QOH49gt{fn#qbm`RKSG^2R)Y>U&`p zy_r3yQTKRFX@u&L!)ygk!)ln-*>pUE7?p!Z8srg{5deBXg}?N)W_3yI#^`0D2c8A* z)T?kt5|5_LU7IXnp-Lu`s=*0`5)a`=GKv2c9pYXRih~i5ClzCY(oXRs{8Xo!I!!Ug zN&Lhbp%9h?t)_uK^NFG(+oRD8l@+&RxfIG89rt`Vq?#)wbaptvCr=582h&Ca%wgq3 zDzhMn0;ht1D4Gejt)XTN9q#tFc3aaEbBsxIBBt%9s1REqT zr-UdVq*ET~(AF_2XFf($ywg>N{KC*sP#5fKASjtuDX9=bO>VZxpP32>9uZ+nkf)H%i5t&Dt_dp3u6%EJJ$GoFL zfQS|JF?0VPiJFGi$=K9KmbajPu$M6@B_9T1JUYa;*Gd0TRZD2_K!_kl_OGL_crZ93 z1fC&z_y^)2CkL=`eu%DV4CK3L1Ti8(fCg6>3p#p`=@u~h)nped<`5c_%Bu!^{!C*aCeC0Y*c7wSZNc{f5%Bhw^CCQGg}fMp+U&Ktwr0XsJZbWI-W$xqdVZCtqh{ znZn7=n;ME>=Q$hU>dC7nBhwK@ISrRRO62jl3CCbg!7$)?MnbM>3>GOi8=uY(0;0u} zpErY9->}YR@iPr!df#HhrN`uxK9U}PpO3b*^_UJk;nV@)_jno(;+q4AAQyj)ruUO% zc6Wb3mYq?8MG9ZlLaqnN%~uK~yq*oH?14jrj@~=a3)Jk4(9=b5j#^h9NSuYny}J7E zz_nxx1b=?~4{a+zTK$@C0PcytGC@(H6{A)|O8s*-8T|Oa{+yup$p{;Y7j{Zi62gFk z{Y@tm^7(Y}2aYQZ1bUN<`g{Y%3r42>|NY z8Jh;~^iO=%1EucA>D3i&gj#nNzn5f4hw&gfh5Z6@T0e=>(JqE?0Ss#rd?Gwa)Eun{ zsD@DHjI4VY{{7RxAOH5t)2FbP!SnAsGJGE?4LsPB-lAZWxa1Fus0u(ZF#6(VsSwfkb_)iQ_hb`g%hw0ev4RhF#tFZ zagKXoIaFsqPRVBXk%-HHC>=wLbjs%VG%p>eCPjfdB|-u#lVvgyTVL@&02PUpEDu;c z4Gv05foS}!E>QZ&ggt1iHZD+H*r;TCmwb$r^O^GHl7jsUZcU=De23~A&Bwq;YHWnLg$}I#?>QGd#@SsC1C#$jv?aDASSFhC?uV?X~FAgZ4 zMUcwU8QIi`bDwnkF|f?nN_`SEl4CJ89gl zNDeCWDOk8&rXOkpH{g|6R#CwO$uYVEIHP1dBQ%N@Kr++v@L%%i{b1C4q$o(!mW@@_ zh>p#EAJQ@vLs%WOs>;3X;n-qp*1~Un;sz~YBSTi3>+t5QbszIeTx!VP;ARq|%}QBu zb`_fp$e06aLqcm3BqfXO3~VhZf2i2Hj!Y|T)K~OFq6%HZIX~ewIB^aIG!J~(nd*dx zF?W!Rd}5@9sLUDrLw*wx>IGp=QAV>lY|=)z@g3~8Xv|5#0k8&T#|c84CwF+&OSuiC z+fE@HR50#7+3#iT&GsvxxsS&H8kkK5Hj@rKUF-%4U(p;0R%0 z^0VtI;yTNvifh=x9nQvpdV}MfEL~osc*yMXaihsTO$N>d0k=T$C_9zl4j5^NtprF; zv`%HaPmN5>$P_;3Jy2gdwHx*zfwS`6)y?`TbIL^v$u zD;@6HGV?R=*C=7{5MBsS8CB^z7Y`fx#*y6^#rlekheA@tdMQ)I-~p~2%Em%a$?;^X 
z$Il@7Mywb2jq)TrYIr4_5OOe2wsH7RVgJL8_3{}&AH)e>_mdqaCTM2Ai>3$69*d8t z;XF^L9781Papi${;;5`!S3}`Iu)&M5@?%2ON1lLm?fmagXBYq3`|#@G)!Va=XBU^9 zd}OsHm5z~m&?*fn@NbLG7pV~>yJt0>5jl;Qxl|ChXHW*mtzKdYk4`-qR8)@3r3RV| zmLoB5)|g~ujxao)#NXgJhEEBIt;(mR!eJX-^MMUAgR@B3ASoW=lxuIYh>#CkQLY2) zp!Aj~0Hs+P(K9>aw1b8~dxkqRC^{2z?1seb@-7My5=-G0H@@U|fwt2&o8rsvEXe{{0+0I)%VbFYOQB$eE^}ih^8M&ds~LrYRFb zQ8T!?8n)FD`iMncBzFdMf+R<*(S_gY(h*-lm)bB3-@ zgU{-EA_#x5+ez>C8e2w4t^0~6QA0PF!@m^$bxL6#csoQbU&0gJ&nPQVtWLl%ody=5 zN4*w2(_3IEUu=(xxM{diC-qt-EJIB<=$Bdz0_jno;GRpj4@yvmISPFy(G^HF-LFn{ zD{!6r&F5(MLLb)(m1|w;eLFFD2?`+VIh%W8U|1c)O&cVfXy|RqUy4T8L?^@&|D+r{SABcB=c6Qd%j*%MBUet()Np=PtpEK^*a*kp_O;MC@d zR&lkTBA>G7pW*}t3Gra0ft+dlKMWpeP#J=}!$ zJG|U7a|XBHSo5Y1Dw_qNI6G7a#-zVvmq9IAeQXXuCva2M;H?XPX z17ak(O4m*@+g4`Mv5t$)1yC9oqd%ar3jV3)7RYlg)Q5xa{4#_CZN8Mv;EscC0&*6& z`v(i;wK${0qIcbu_qd#BsiyB>V0J=l@lv(Go^q%YEy)abS~ScFE8&it?BFvyaz^?v zok($fD)p$MEzOTdFQ6;*q<69Br0h2iT{4!?>zRvJAK){?cm&S2ApF4cI!c517LLzR zlcW%BI)iC&o6JW2g@_e~Dv-s=8Knz@JYlEuM}N{UqBLRZ?bqnhea3D$R^%t*bV7NP zr0o2(@j!IT>xk#{93OE>O~olGQZ>6jfBN)kKxP!oy)fen7?k<<;$^ilS+V)?Vg?t& zl_~{GYdm#$-I8g{0!Rr*fr_KCfWuE=LTLM?g2-4(XKwtWnv}VO_2XM4$|*-nbO1Y` zABCU{VARAVEiU3f_W)RfFq?Rwo3%GX97qM_emqW5e2;!W)=GfY3s*tq9 zw+d4?;?0V3VXx>@-c&T@L?L3$nj#dxW-vL?fQv4rWfmh*gwX`e}6i={MdW_{@riqzY_||hKTlYKg&r^aeBqk z3dP_Lj@G{ts-pgi{OpGmj~BC&1Ow_5o+xKNSF|w|R}OX#r8^|PouznqPBsn6a2$@9 zUQI?vm{Qg+5!u8P9=64(Kq@SpDBTkj&`LH+Y>{A<$|@KZWV)GL?1BAe@&cO7(wZ(J zFL(d=o9H$qVwl##HZk|%s6UAM)c5G!*+2M=&EgQlM|fqvo%{n~WrSsjk=VJ{;4~S> zH*$fWKD>GL@hmUXbV)g#B5BFl&7tsgeFDF9U?mJAHRWNhw4$!AUcb6{(|i5L`Ro66 z_J;5nrEV>>2M?)$W2C_AXgPm<|4|(r)|XG0XBXLx*HGQ1`F3xiTNL1oj<+B|ihHLO z7D`Y_)J(6gvPtCK#N$zJ1q8Wv5a{k~n6;^4`rD_suil|M9GqT5S1@vrqQ=Og&wRc7 z2WqY&qY$QFDf@wq3f<%S{g|pzdS9diUQrTQ$y)-n2N-Ks4m`xLmVHd$lGp4w0qNPm)XLj+A}!pHHJ};A7dS!$*%$3ZKlnTBZqk)_rk~rB&HE#g27ZT8ojaQmVm_@*wEQ z9S)vgDe2*G2It6w$HVCR4Jji279G-5#T1#&?#Ql(d8n0Eb!5gd&|wL7`AN$p2Zw=e z91sN*bA=tf>XBtYR?pOhlPhhyj!i2NV{s{S%in4MOQb|t=%^>vD33zPYQr518Q!}) 
z>SL~ZrgJzt3tnYmKw&PlQ&(lkt>*#osmy0My4_uQXm`!LSkNw!3*||1ZYuL;s~bz9 z)h8N@<8JO$(udvK*0Xn?-n!x}F(jq{@GKmft+5rL$_9!ot za)8F-pp1bUKSc`sXkhlw)bokAW*&%cIoGEJA$2)*nZm`EAutLk^)$9gGzK5iRtQ;O z@RC&=V<}Qag1ckXHcA*Nx{q@YQ>h&8ZK`?5#{WnfJ8z;cEqWRu#`3x@uYZV!VH5^{R|%d`QSm^y}Sz5=Vdm5b4|i)g7;;f+7N7y zMUyeS$kC$4*E|~3-Uaubxl+An0?oeq-*aHV-7M~t)d3P5K`hnyCIa0#nT?pryl<uecaX^n=OjZC;*ZDCw zjqyck4oIH*N^;{o5V~w|%0O=afA;=uD~>El6h-q{U*U~*FF|$?Af%M4sG_<^K$&z& zparOM)zH?8a3UP=QV<>;?g2^X$Q*0z_w#VZSTE=4{M-71^9jey=KbQgxIjRTGvh)yh#uq0$k#hgkn|S(j~IV*osOlSPG9| z7gDDjflMlWq5CIl{sC=!Td~U^9d;yBw8wnJr!D`Ko{hQiI^}$gbQW^-QrJj{YsXts z@qM_Cgt(Cm0g$8tDXGkg-iNqn=GE1wTQ~lWcH8_t8a|abowV6XtR_B0x5#Z10<9Q2 zY7e8RPd<0JTa&S?fqQb7#74B07pX~xO)Y91jo9hAin%X_V7kk#Ii^OL0)!*6h+RFQ zAWY1oRRx>2aFr-6#V>IM0p`UTO4I1$%)hN8?@yIs_~$esYG(ST+kNi?N+SU`?IQ4y9Ws!a@r; z0a&NI^Sux$^q60!-Is}=$^%nSxzY$H4;JCb^wM&PN4~F?q8>ahs>>}DxIf^=!X$1` zoP}OTH#dsc&i4dMWLM(jC0r~{4nDsBN9Xt>=H{#H^92Nth#Duqf}?-p9=GUb#7Hhv z1Q6NooVGZtg{3ZJv_8+G&w`Jm#L?|mkV?3Nn$r>wJn#4sAy5Q}-tE4B|NZXX-#TZ< z9dv9t`|;#pci-=~CInNRHgTUSzNDIuuYTp{VZlRcVeFoKU$Bj3xN5m8Z+a*Dfp~lZ za6EkP_so0DP;=E}e9X4U=RJ*tQmx%Q5P5QV)IO&jLpYK~(-!&G9UY%_-W`8BBFDP% z7!%;6T(a_b_Bm^$J9Cv%1Kqi+((cf`zWkVg&RF4wCn}4G> zs6_PX8+_}5h#?Dad-&57z7yJASn{248}$ujmYMz0HXC@-8s=IJu&i{6O;tu1K~H%0 z4(8D#97E$&9{uz4>BZU_fY{NwxfrI=3Lak7H!@&WpbkaXUyXoX z33|fD&Fzp(@bGAK!3D)cl(hkrLnaWA`0Yk2oeU-TTj+Ojjeg%--_b=J*|jUoivcsC z9-4o-xw$D~{*inc@^9Bn53vF#V{uI60{RAer{Q(e9%VlOVn4iO-hI@h+4WI$8Iq?6 zx5M*Ehj<)zc=Y$(_lNtP-IE_aeK98l9)1m)td_>A=& zCek6-sJYA1KCza^Q7?iK_MMuDR6eN~L%t0_H~h$*8ELgf;zo{r#S3OUj-wkczfaCn zaum&I1lF+Vm=NB_!;mGK#v=)K3u>e;{BR#ilV7EvPNH@u{qAsxY+s7<^m^pjQ4j_g zLYrB_mHNOJj7t(U3ZtSnA|_N1D5iY^2pati7@E;(CinCel`i4|g2)@YoED0gumB?h zyYcFB9nS9pI=Wc}^-cxH>UP(-o!+^>wko8IHq!$gwKwtgfdCLU2JV7ebj9aVNdBcX zYeeD(fVzlmnJKy_@AlSTy?XUWrE_5>^YKM*9kas0uE1>O@F)Hs%b^oGV+KTd&qPU$1YyJbU?a`_<<5<|h0J|6M`d{2!*5KyXQu8(Y(c z0;nT~8C|RT3p-vjXfs1e9fU!6VK=0KsU=PrC|SGR!=F_T{*CJPb{<_Iebc&Vt5x?m zBDHh6l1}b#H%N5^EV|uYk|@Q%j8@L0>=sk|Ww(jy(v$Urqusd(t8D~W4lK0Q66BbG 
zORHRvRjWzX3?Bxg*RItkUV-rvQ)BQfULq6L2@}Z#RtZw^ka?!~`p^;AG(1u9m|sLC zOOR!8+P3}<%w(W{2>|n7Zuv2P`kDbu)KMq`4WJ;)c{1UO#d^8*YKp%9ZF5uhT~zze zR2z2tOZeXzRkCW&cjVJoJ{&URp1#QA=%0+caLP>SLdTrIT12&ij6s=4JLiRmKJ_Jx zA6XtK>CmY}=#=${^5LnBB)}vuOLAa2QSn+d&yPws(`oDA1u8UB#eL5gqY=)=DfVm1 zw~g0ZVfS=TBNdSJ2Ki_*k^F$po3o2hb4 zhT%;Z4^*lQc@#cZybnL+3~-ODa7AYOq zV!WdW9TG#3=uqo0T9FzEC2wT6UeR@fP7-KV_?G za-(A;o-^6a?3?t_l|&DDMjj*mM9(v2YPC*6PxN@rD9kCy;J|q>4BN-J_ca=$IiTA; z9`2DLb~oqx7Ytpy_k|Okn-`|hs^g?6<6`GWhGT4omUFJzWT{;z+??roSUbh6n+b<) zF#7L86I8mnZhV&s=AM{kpu#L4;%5wAYdCy62!`9}q6@3)fzZE+j5QRD&=q8)M8my+| zDYx3`hzl9OFR#O4d;vRXZSC1N&GR%KA|f|0L4Mv~n_~kv#|D?}HfVEf$SC0OAM%oF zkM3&pu?f$c*RSbccyU> zhT{lY>BlMOlRB`(=rBo|Rv@8+@0H!t;P7-M_I%8jUrSfap?cntA_sH%hS;RhWsgX+@GjS>rk2jkpC!?oa3?=YA{xN>T%3%E^N9Z3@cmhyO8ozF zoJ>a3aDMib@&DIbn->4y+Moh<&?%DKJkKHXqv<$AWmu5-qGE<5V-}pwy@<#8@p=~#(qJnK z@-AYo0A| zsYp&)LXkrp-zC-dC`n@$w@`(z{2jU~c{gKSPbIUF#ds*$n@PWz`#W$-vdO@MxFA&> z#C#l%N_rFIvpVrykSeDqK$(3z`PwPjQMMjbbRX7ixG->?v*XYamnTh8Sxkzy9u;Z! zUbFW!=i__JuH(sjV>D8d1IJ|9cDkFNgpA}CMBRzU>SEGNrfqSf5q@10tF<&9$8uca z{Y)X4^5~VHY>U;Il5#{{CIz!y;u#ypnYQ9Hvj{#o%yF zUKl6fXfo9Nur1GPr9a@`l7ZP|f7?;d=9jL zw(Y+!x4wJZ*njWiY3#p^{r73we_fqSbizU(rC0H&Vg)JkUG_TkdqFneDEl?E;Lh*-<@QTM1CYUy zuO_f#C>@av!yF{oKRh|uI}-^qcwWxj+5quE^a(b>_er*YE!yDOn6(LcYa5dbDD1R0 zsC|}6sc726{CS6L<|3=8sn&9Hjqo3>v0h&26hXtP&vKoXuC$#JrV?N%|BGjcoOW@v zrj3&F#1cHCq#0yYq&krLpPS=OJB?s9YG(4Yq7gZn{6r`|s4)fz1F!k?3G=fdsl!ym z7!b_L@Kc)ePtWt`>e7W7|4axNl|9g?HX51w-AOzq!(s8j_9}6J9?**M9?!z1GH{-r zlTUE&nn6uPH9M}Uc`B#J0Am~x7L|D^F#+!;X6GUQOsv@a(rD5SjzKc=SW_v5dvwRb zEcfl?3`W8XPpo9zVLfSQqinGv1Krl92^JKD`~BDn^Q_r;qp_XU0Bx9z(5SXr-0r5J z*Jd9j%f?K9CPm`W#zJoR6j=>qCc#lMnha>wS1(FKK8P$0KyDZrLnshoM%vUy+SdN@ z(}&$7Jzyn1M(}zRblB9`lIF1LU=^Wer72XN`52VjOd`or>5VaUy-eeU8nSx^t0M#D znwJ9v{*urDIn-f@EZdtn6E@FsnX#KT2HDwZ3=CFnHvm2ZCXdZ;F%D!f6571hUoWwJ zG>Ic+Ka=?W19dC*gK2xCmqY@hpEJ-F!hzUCN;UzqhwM^8RDX#K5BDW4TLNcYVBhn 
zPQPYqL`LST)`%|anFk)>Ac2aE+*7s})IM$#oj%Rbb(sjm*^JujEqy?Me_m}cYCq=$Tg&1l3B-W-7~SkNQkzqjYO)t%(G)MeO$EwDWeeq;~U4_FoWdvt)cXmGkd6rT2ecZN9bd|Gb2s zjs5pNp2q&$*ngjn{dW;o*&iA4)6ZiKgRK7DO-GG}SXAIR3Dtaa3+40FgI;Us5X9vX@s(E&5o@+AL zPsE08JFOkRd*>!_=HP){dC?BKAM}YhV@I%Ty*34?WHQz+GjsLJxyc+3)7xmg@fLW< zn?JlYqx;L@9l42;<>-dY0-)PbK~?Ggo#sA{Fo}%triidWCwbj_Zd;8rV-#Qnk zL=rcgyjXN|!EHF1snPS-Qsfub*GHrf)__7egO^JxX`cIfD#?GHPCSe=K!EA4fR~*A zzuJ2B+P?q&-DY$D`(B<#{%hpFg~@+yGkXx1-IyR|CVE*D6{LJLl)6;hPUUfggK&3ZmQ9UUCf_t#5uogUJWR?adEr2xyMchMZ-9 zO4pR-&ls|N-?IM3!JR}G+K`PmKSY@-%MbDS*hYgS>{tHCvd5{BQZmAyG3ix@a($TN z-IHH;`~BnLhj9FLGE$XAe#4H;glN>>=bshKM8)U^2GbdZcQ`z=lCq9`j}EJQ?C9#? zhQp}kE5y?`78hc^{rQE;dMfKAuYD5Q3g@Cq=od+2BMG**83B!zKmeiFvRY8k;XCbbP`lJ<>*kSD}0)@gI zW79H7%(Gjb1O0!x$ob_!~Q-U*!mDe z-LABNLT_DIiP!pwwBrrqcA13+%$=I}qD8Ox7>K(0d!$b}qY>YnA<+lBf9)EZcM$gF zI;}nV$un2GlG!OcAvY1mc4$;}oV-KMNd2Mv^?j+0RW!?PZs{$d(5JrN5>>P0RYvNR zRHPY9Sb-P1M9U3@FO<+I-Z+x0_+ZGc;-xLUiuXSGRebYEu)1lBqFB z>zz+tm1FTstoffA?n+?!WKua6OXL*qOQg!2Vzx|@Q_Pk>++yYZm3NijOTL8ZE0f<- zzLaT|De0!9hrGrMrA=?4ypHlDPWyXa{d}p@D3SM3j}GQLDsaPxzQ_>>4Ybbx>=O#b zH@{ez`_XX`?Ug1y@2~!GGuXQ=lq$nf_N;pVa(1wi8oo%pqzvBk7qC@O{aX_B4#}RdXah-v@Ja^wwNnU&}t>&S~5b?{dF-`lnCs+`O@W z{Fh8C1P#i)p;DU8EGHEj$tvQvE@8bu5$mCC8H^K|@ws$J(K{h!_Dq`M~zaT~IPBS$35k?G2$wNg2;Or$Fn>q;d{!~01T zDF_-Ut)3VYMP+8E5_RI1ol&R}PsB3Go~q;&wSwmeN1Kgb_^)3jw}W@$=darB0m7bw zJFqCylqE-?Hg2yHd0WfA%cZ6y-b9EKMg0=ectFM~($xxnlhkZ{5n=dne%oSy5k%&f zzg}xp?~}>zn+&7m zqT3O{z*es(84LUfEOIbO@v1WWw5BqIKnGU=RczrcjO!$2VZ7M+6uglkYZa+Ptt9^ad=D(qIrI$a@%7WQGGixpq6~P}S`~c+3&_)<-ZA z5I?gkKuFXPpgu+(Z^y|-G~7su{%^2Zwy(15!ERK`l6;$1-bU{(z&chQVhS)9%`_Yi`nhcLHsb&udVfn6Ld;>92L zK_ZGwh?X=_T;~#UsUq?K!=<4pQ*mW_2kanCda-_2TXE)8zmv|LsCvm{nBn!_%$JxP zr0RhNT-lJiQEJrDI-iw6I(>!NRGwZx@6k?8detxN|{^5&Ydr z1Q*dQ)c05BK{WyS`K^`13y@o6 zv?F#?yw5JS7!_HFPN=u_ojF|fVRQ3O>@Ty+@H_!w9KO;m4cHOGOUBUPRoetqmVAwd z#Q{cE4-qcuD$4_ zadH#)xm`|+0q({fnVhF&pUJ3W3!^B2E4+_~Av29pEa7;7x0D!E)2n2Hck%I{)LKS% zWtyo`BQ_p(lUmIX1`2!44QQgGjj&!=P^1AyLBZIy!|3M`1r4=jCO`>|`|^R7M1!GK 
z;AO4pi#PzhAqC`fGoEUfNfISl8*fEW1%B&k6nhdh5r1^98Le`wV3+mO=5o9USj1lH zQg<69MIM2}94Q$u<}pxI;oWf>VJqx7C1;5XNxDo2%NC%IZuj4^D z7CN<*y}I4w6NCm#CCn0xe?0+3IyCp21wCLQEC0X$@BiojSwVBG=ga^5|Fz=1p)Y5M z#aIxIU3#b?&)4tiS@8vt@?aJhOVKxv#X(^9b)4hw(Y_T4W6B9bL%xCXY50OV-GMs7 zw+fVnaV8XFgGR*TyxEUR8L*r>#XFT0j5r*C24q&< zw*C$R-_%%0k`=q8A3pw4_Wr`+zrep$Tg^way7gxsNzVG`0BLpl@vPZxmGtcBMt(eq zhZQlQVE!{Lg=%0PSTQTUR#^!sGw*FO4MNsg|3kLQ#>$VQe`?{vnQ)2bc(OMPE$Vl8 z^!MHOhx?t~lOH~PI5;{3Ql-TtTa3CQ#pzi!S*g#ynJ>-|5J6NQ$k~UA~+wX%_vP>A1AX=DPGxCI2ripw%=UV8#AluQxYe+WucJ zH{ZT){J-wwY5cz$|F7Sh{}B}lc1_pjIjO;@@ z%&JJNCc}+mDb)Hd5s&7kAY(0yFw!B`L~z8mNyjE-x*Hb9^Lp-`zR>Zs)kh->`bMs1 zJbcxg5hij;MY?*Nd`eSj9&t@`5TmuYj(;TM_`jlc(gG&@IHqMUOOPT%fJF;Neate3?hobEmbC?@-6_2Dsk{4RQVCZMsv|y{ZgrY#* z$;9(awqmhfkT>OwmI#YT2!$qT9)eRAXGW66Kve`&ngObV=pysMbR1nL+IfEgKp7B? z$_oRi2v%uJW;sz)X(CWZg+o=_3ONiH&uE89KS7p#L&_fJ=op(#xf=k=Ju;C4XE01v z(RQUBG!>r@2OqvaI63|C@MD$OsrWeYQ!!zV5k3{4|2RH8!WQi4srbkX14tM8eYPr+ zC9)K!J?r2CACx-dUOb|>83q&8OC28l{qPLCwnL-ZUE7;g8^O7?JMjVG6=%vff1H`U z9vz=`b~_B3!Ro7`uKP(uEuBXc8C9RVbFz+)D1Q~+U`DA?H0+atiNhDhb2$U2wgn-9 zxA+vUy@<7n4_{0XYE_3a##;5lU?+I>%)3KS;>zD3h62O$Yj{N{T@Y&r9}+EzZZs4Q zPTKZhh&JWcI5~b_e`mb2a*N>enU40pd^Yu4T{LAo{L8jXu7gV-It`ini$YtW1##qN zSyVoIQmx@b48CwQx+KBJ^kVEb6b^2~yHu$ZWcp(Ac*0CKz9m6|>@;Pp(3-i)Xc@#F z^HG*vA6&E>N~nO+wFsBCD2q@l7{p&uA7cAVrGHNHNDe-SEaw@ys0>lM>x;y5T6Ak@ z8zjl`ot?7-pDRkJ#Np)ljUEAgK<2oF|SjX0sQHN!>L3G0xDyi;D_Cb+B z`yGrI$cszmKago?!nQg4@K_0xaD@N77ZU_ zK(lR&E>F?p+uR)8%+Yj(JmyUW$68vNGFDh1@o+#(dgSCvn#CjC zdzF`wHRUoG%Cj-@i0 zo0);I#c$Q9jC4X|%;;{loyravI32o)V>RlNq)K-4LMToMYndX?G;Qdqe64ONU*Nfb z5Bg{Oj0at`yg}s>dmOuf$L(;;`o>;4_BWa1^CP>nxBKys&N8%N=<^Lcx)lq>gsC>5 zvX!hbzOWjHpELm=Q+Ob3>Z|)aLm>9_SKbCoT8LU|xBGJI)$8xxy#2!;aq;d%qQ$-` zI?JIN>g4&IOhM;vzH3|Q5U51x#<0dSCMN?fE2)jL+BUL`JZek9#z7Y=X9HQ?+M5Zf zp4?F0O=LCj{qb4;cG6%#?Jm+iQcYspIMAHmjV|YuRB_atbI@8Gb%Jfw;HZmYS$GeI zro(j$&OlUFc>9h@mldbeZe&3~7ug#vhp|?~HJh4;={`D8DNeDt69TjNmLV)_h_pSe3rm{VlSy~A%&)4nZC9^0Q_bBZflg{NGm_uN*)nQlab$BZQrq`HUALqr&90B4OJ<$nG=Wb%9G1HG%Y~_MWmCU(*|M)) 
z8g=9qOm??1VJ_@C9vgQr&ALGHuJQcb@%^rO3sd@Os3uc+#nQwY?RsX}l}uB{GOuS^ z7c!-RP}RIZ<8v zb9*2uwjJyXAi!3G-5A|&n>pEeM>fs3`%>xJyWM};(Qc0x4V~^ilC@tfopV7`YbUoE z?&V1hq4H(5V_asnhI1gU0&BCui&O-)6>%k4|M)NP6+pLQ-5rfJ?^rrCCMBRlR`E_q z@u8Voh6{!JeLQi_Xz}645Y!c$Valu&=jyuFLa%#{nKgGsM?kuqW_}>VbuOfNp-9)% zyv;hD`v5)nYmE7Ww7yX9mB8c9ki*9m9u|8A4V=z)uWGSrYUX z+v`joNKHvb#>cTNXXI<(J0K+cbmg~T%6m|*Y%ghbglNf3WZc zHo?ogEdqyiYSiY3{a(lz3txM&IrJDoZ7p=5b6Ub2Wtwx1lz9%C@{Dp(jLVIfQ}{PF zS9hmx>^tKaR*~K90C#0^liwd7zjqpZ<21~^v885UsDcW$5{ zc|F;@l6=7~B$E^evH{8m!$i|(E#2@kn&XvZ4Sb$U$(l`V0oRgqEOv{#nC!b-Q)+XA z1--;8BD#4CFMpJ7f%5CgC3h}#TypiPOXgXF=Ov(%C$~erX7~J#>t^@QUQD(uc8dro zIJo0R=$s28{Pe_YI;?Q3#8r5U`tjI^j5`PY2fA}0fF`tA?$}7Dtu!Rm(&I4YCxL4E zZ5wV%Ma+c+FY(t)Y@Hq2S$1A_{_81+f|xu876`1}1aTm@n26hE;DM>OICEYxce&<} zzu;ZY;}N(pLl#Mxz{jWj1=1J%06HF9n?EgR2jp1Z5)-@e{R__#g0Ur-vpyZyvHo@E zd@0D5pSX(CR!=;z@_o(R6WY>BVP+1=hxhXDvUnDAJfqw_mlnCv)UDb<;sgBoap zLM%8aM8-@mW7x~#=&H!cPG9Wy8C3fyYFYZvew>a5A^Htu%<3f?kGY=>!-r`ufn#o2 zUDYLAsXXYzYlWH4V28Z1`494Ibk||NJyi$#x9Ezz?cC`vYK%s zjTwAT^=(V;E;A}a*S!69@ICJchlU4MT{ftI4T$;V4+o|*CT?A}^U_FqZZcOLwt;;Y zuMc}NQ=8{9DZ4H}L({IA^(Y;QEa$4o>$@4nN|sV0v$-Zs*{`?%{hF z$@Hjp@b`nGv(CqZlXu4_ALy#+tZkhh?4Ch0(_`+t_lH0Hc-8?i@9^)tIJ@cbmp8S4 z@NxI#?9&NusOgdU$K$gD7|!&gp$0l9$HyN!XaD$!gPDHi*|9Y=G5zlPu=}y|@$kqW zdZ1zx@|G;D`1IT1!>9LWhacY`03II0KQOC5Ox@P;yLS{$VR|%%&2UO2VtN$*_~S8A z@980Xa&UUK`w0~tPOzp&mD3-OKf*fy&;e=;J9B!_!iWH}({G^Dvt!(XTho)?v%|f= z9l#u>N42w)T}1h<=~w3Ohx-S|085~jyU@_|;A9N}qyGmqGu<}UZ>&24h<;67N)-AX zbtxk31Y2*^Tj2Td;yd{QnfOldNA)*xWj%^VE}MnkR4L9k;oWQXPGf}7;#>KFaW*Kc z=3uibC~tox-UIK|94%%74Zg9zDeeGY)wCJ8-wEEBZ#4Q1#npg0L56nkBq26Git(|NBRhc_vPl?r0@WYw^qt^4?Zq*{ zXG<+7--|nengiQhP)hZ0>=yX)2kQ%=KfM2=9{I&kD!ujue3(r^X_cr6K*cscp`bWD zsCP;^o}K*Ay^46q&Ng(kp9vR0v*plAeGGLwR? 
ziO!M5$f`Pm?0TxTkTxeWq5cBMVBPJQ57BdJPZlh(E}B?$%iVcZ<;jVLFzp6DF1Vuq z;yi${h6jISRT z`<^LN+JQZLWMn~Yxs({4rhQRSMWHiTCbF3N#+77z7S2uakx~;KZ0t!*gPtXh{n%YH?Qpf8Ukf?0kSfR)o7N*WYBCsS))^*9WxLm+%@| zX~u#=rQwkB!^r#Z+w)wG@}dR8&9Zw%NC_ERbC=Oi*~u-WyUt2e-wf21)!ITTv6y{{ zMAqWfflSDlfSV154dX=33SLfKZN4{u}6!3_7t*#WZJ(Zm-Tr?)I(9IJWCJq`2fWq>Vn(XaXAKH-6k%0FYEMU@*Y8H@t4o$>Ke_! zQqF#|TBL3cg}&SALsU_$Xm)TPuU4lI+rBr^2tN@!nx9lSM3nySJ_N|Q4Lan=qNjUc z=m6dGU6Zq=rg%qH_Eu)eKf^)E==Ck_7pS&}L~-uu*Djv*$eDULrRs`cWIt6IS{yXc zn0_P3&T2#nP$h#ybSKB#CegbSd!c}p9jAyRXom{>3cz#)!L9*tWSg3C0Wt2_p+pAM zU?j{e7b)_ICUSNdC93TN;+ATtIj*EA_ZEF9iJGz%<08U+&xifQe5yez8X^oI<@>oTF``;P~y_r2r{P2cXA+a6?2(oG+(}c}u<^z?4K`C@lwF z$$6G76q;rGI4Lw=>^TXK3s)SSSANQEY+OUD!S2o2L7ey_i*4Di;+(+M{_=ekWW;%jxF8LxR3f*f+BHt?3K=DtrjX%b@AXX>7rU~f z;oG_1TIu;p(9kUyFG48er>C_tZ~;3ylem>oUnZc|w=!ceUq{T8B&YAjOHW^4*E=!G zvqRZ?ds;4gXfVadTcI1osUGh{>VWPRPC&d3j_$OUb^KkblZ4z87mq ze6fN&>-l5t3Ve1o$>&49byJ?bKM5z6-0*xhzC##*{0ReYL6j*fk~` zS*;7?w&WhmKM^`|`r0j@GG`QOp{LvT_198-L{-a6O zEaMa`h?CN30KMU1d;SRy7K3#$xLsRl3BH;AAgK@!lpQ`vx?06{dj{iF-W;Fy0{p80 z_9aYHOzP%lXqL~oD8k$arv+pGt}gA#dy=HHNUg;-&<63U{tCQGB(Y+4lGJv_zB@=@ zIxtE)I&u$9-}Eio`cBNLaX9$AeMsz61&EM`ie4N7!j`fx3C$F3te{9Ecqrb<8|6H_ zmlG!F1UNFl90-~*#Zns-RtX*(r&1-hmt#N85L;~F{8o0c_2h*CL37(|)KOM9)rm%q z>0#_>Mk-Fl+AJkv6uA)C%JMWqtK?K#FXvE6&*HJjunvi+iap;%o_6P9Ix%Tk%(lvv z+ha<*C&A4>gPIu8rUsi#+Q^|jftqsjemgg~$>U{S1#><;$tbOVkMHfkbKSu0oPZc{ z9~32Dk@=8op`hi;{mzLu*Z7xi0+zho99$1!Y>R)h;>!l*L&u2=lJVY$nMHYDqu9WC zKckqjV9tc}PEb7mrm7v}I!PSlvyTsLw-PIV(Km+D`&TvKsp%k{?h` zKvF{tLvX6$h^c9rHuE-Q7K$pt!sW0tS{Gk}b1fy>S_`3kOSudKT>~RT1o9mp1}0|a zBt3mV;EP-LcEpcgCE|IZo$yUV58kpEra`U8w1^y7A0qgu-Hyryv$%pR>kj|7;0uG)OV)fBFQo9IK|EW5-c{N zlbFI}y7#wU-^oyllOWr!fO0fkfpdPTZlfia>2AvBY&L4*F3E_f0!XK2Ed_Auo=6tZ zFmWEzF~Eg2h!T9oca?^JK&vp)GlI z%y2<0@=j*w--V+{dFC^M{t%i7$!>WdtrJ^8)*8=@c*!)VMitJ5)NUGmHD8YKfaQtb zf@QD&LvOX3o!-Wqo^Fr#TMOq;XIv@gkJZdi&deKrbNZyqApyx&je3A|avp&8Ggq>l zGvi4#5{sEB3C~rO0}=%Bn?GLt_^ohqNGkYrlu|rU&2ysFX~oTlFHix-yXHqv0$DWv 
zOEo;jMs@*)sSisWZcId*t4xE%K4lET2#T9Q*1s0xYrNm2RXxBYhF9BKmWA#c(pt<& zq-$NSy;prAA%8YrmNOp?PQ5coONvt(EibM=QjrVt+1ba|B0jg*C_djM-c{%E)E2=o zDM6gJuRbmpU>Vln9#9<0A=Z6ihFAH8_17FzX5o2eV@=lL5h(7>Xw z=|3X=nz~Ej`Gxx@5>K8beH@zUJHW@#Rq(nv5eFph-a9s^CoiMs3=)C8JyMA`0lhkV z9-|yWIWP3o`|4Y88zu8%&L71(XKY;ss9NjaRS*(&fh<-gUZQ$;s4~48f`7=lK`g%> zCR*^k3_+9QCNC{bwBXe436w!^=Ydrh(c&DBsB{fcqRpV43!uWHi^3|DbTVr- zMFBb2+!K3Qt-sr1tj1?fX)$Hgw?H6_?Y)U;UCQCI5KVu%(W$0-EUs-?>aWW%1uHP1 zi-(upy)nUTevl;ponT+*#KyKYx4!bX1Ns0$bw_a1cidOEw05!y2qu( zNjdrq>|Lii0PU2`y68G|{n?yn5%x)@I{&y#BQtvi)H9&9gk)p{Zsm*wg3M2cnqf(C z9f|5HAHx&9159aRnD@ad?ba#j@<46TV5C+RiU8GXD49C`bxD~Ba^>U0mp)_5SMQx8 zp~j#T@t~u07B>_))+%4Pv*#K-sS+Tv|LmGx68}(_eyzWy&TcMqdRn@M-Lg%Z;aMQn z|DDEQ(hzbjy1UuixZru434_bJCZ=H81+GK)tD7`yBXF~lB&Mi;m+esNq->R*(6Vy-U^9Fnwc|NbzH3(Be6sxK;{ zL5e>^EjFPfgoQ*_4;Ban5r)1*&cBDg2fetj7ITDAp!$H;zD?mTw0{sU^T~xHZhPP9 zGLt5AHvTLh&a&4&kO;#8IANoPS((sJ%U8F2Wajm?JSrV7lT>FLA0o%nPm?Bp)Q=o; zcs2S{ge^=AB#YdsM66$~h`MAe_l)gHW3dSUe#ip3Xq13qhhaPu1}ykwaPKxv)K-@- zB7`%kVi!5(CGyYZI96tin#;tN(2LiTcQiBggGal}%1)|NKuZ&?mLhol>yC^CkiuC$ znimd1#tW44`Bp!k?y>T$J24>O^WwkGagpV}zCNm((%Ax^082;IBym1(kUV3}B=b_c z8=EzN-c{-U(82Cq05El6@;y`QS>LOQ)pFXc^>nInYy0Q2`OJ76D2(b3FT>gSj1Xe8 zV{raUY4Q=C;_hkYX&qkGwxumktT(c^wJ0xqj(!<;jp^<19M25%*`wHwkL|yuo@TSv zwPi3HY~cX1w(`xHwE*JqEjuO|*YAmG=ugCETqTe>+_oWsrX9Id0LOvp5KwlynWPMW z{As{hNfFLE<}>TF)n#&({djkccgigstq)p1YU?z+UnH6`DRxwi^kGr^L;h1jK#k{W z*nRbiWtDKjxS^cxRT$uxt=5ZO#ryjAYYv>~@R~>YDEEX7WpyhPUd)gcH4C}Z@F9Eg zi-vEP^VH+zP)Kw7CuukdD1`WL(UXoq)E7UyUbpw#$n%eP<5R=kPgh*&-H-Jre{RXo ziTe_AHIS!x_WaulFK9HOrHACu!UcZ04}DuqK-4I)?>;Fr4Q=ejayDMi5|1u5o|?R| zT8)}(yGB=z{uI^Oww-Q9xY&~5oTLJtz&2W)?M**?ZrhM);4lq(VpJWt)pj|BO&%e) zq1*Lbml)(Tiv%MPLL6uvjaREQ5HeWWJOfaQ%3Dt!wtPIp0q7W+WzJR&E{3twqs1*>#J4QxAm<<<2PwhqWNQu1E-W+RWTp(e(`gD zwNh#Nb#sv6@-=U(X^Kd(^rL4N;Q6+o`E%3K^j$~3oQz4*vuk*WOk>z z3ykNH&<(0jWOl^Nc?EZ)*^gJ&-d3sA@B433Gr6q@702o1N0k6ZzIxYM2>c~CR8y~R z(IK~JHmBc^At|KHQ|O{`aKi66YN@1^7BYcta%t|`JI}A(Ntl{LGIaizagQTOJk{@> 
z72S&1D|T>u&-D*3HZ}n5^J}-rb9VfVzcA;t-v~`>PSo=H#ThluQf>F8w$wbJittHt za^`#~WNY9)bBTK9qeA2FrHjkNy{+Zyh5U1YChILv3|!Q?OHsv{QFco+%LJK z=mRWUQSC@95dn>)2Kb^{^ULWA;ivI4pKf@o%HBq^KZ3K7tD%;mG z?bgPjT;9|kL)3`Gq`x<0VfnLc&cp-Nzt^f1i8G|;@P<&=(Rq;dSYses3v6`0PgHmOw%?2l^x0ETBInPH-?xpks)n$jrc)Cw_*Pvu&_E z%cJ3oonmy8U#f+K_l!uY2ZTr-#}yOgaBIBA+fey9%ZFLjXvJd#+G^|W0~HbGx*J%M zB&t1?=E=JJ_$1x6o11_Ssw&^{OZQ=&yysF@Qd-N)-YoT(KVPRn2PAG1wBZe9gKU_@ zD3um0ZSfFui5Rs-XftmfgDW_uVKh_iDD;V4D&MrBe^eg6#TKyM`k)E&n%jI@F3AT`X&_l{v4 zApu&af@jzV!ay|mi8?wjA8Gy4r2xA~i|s7KC#9s8Fw&eiV;K{l>UPdXjpC8h-0B_v zqQE{9vQL;KyCh>8a~h;EkA#73HYgouG4k=OrMmxp2Ya0Z&{907?rY{jO2UC{e52(Y zs}iIG-%W;eSkFJfJQEI@V;rQThj|Elj~5s^yvCZ8OhfiH>;bZ6>Wo$M6q50pwc}LD|~ikkJklpBS!^JOnGs zJ5x!c)@dG(RTH{yhxCul{hCB<^04p`HhBGZap=57i6|lmL%Q)cX8>;*pZNTx>un(~ z(7oOo&?`Skw`s*!M=kfFw7Ako2OOH@FWQrwW8BouRHN+k?l>>Rj<*WFv?Z1DuB5-? zGpl$(miZx1M_#z9+B~iYb}k+i@HShcyes(!Vt<_s7N)3{@mv+E{BAI{w2dHqm z9i{?v*Vt_iQ}Kt+^BrGFGkw?N>C)9>K6<4qI-S?n-c&(r6r@FLLrZB#Z*sMNRQ)Y;G_0dYh ze%drc+!SqcN2bIozZbSq4?_^{)#rL0FKolZ2nZ&&I174pRW1hh?xR( z!>`{~a(1$ULwd5F^B`F}?=)WBSEr!XYD*QwrXw*jGz+(2G~ZO`e*QDG*myo&k9_5qya~G;M&TnyZ?;9#oS8G|>QD&BcmAd5O1jeOiso#=C@H0~!}1Vt>v)s7 z&)pkp0}!-9MJ>S9IZamEkI5d!TRhrHFImHdUK@M7rK4N5QIZaAj6dvqt8=Kz1_~yk zYAQAv+gko+4X|=rIO=;7^3>JL5YN{uKU6zhMF$BXuqVVo+mXh?1|OV; z>~V;Zi*8NeC(2SpPp1fcWfpWy-1qSb09Dcq1Xw18u~WN96lk^iCo9O5TV0Ws4A>peXM`;rT?aLC@=;G`$}gHV1FzRmZRTm7 zE4E%bKP9g77Bd~b`_6(#ABrwwnK9VEI>n%AD@_+cWwc}RB{F>CeZtW?k2$}Yv3W3j z*HFd<-R;DK&%u}IBky{mPH>TRjq~yMFZw>eqg>WNxP()$6AyS9FBA@aPtZ7mBb^(A zkIXc9BYyjkB0`g;ts8?`=+Z&|QDBmW+yEOgtr>TA_0}iO{Li1Pc~xkEV?FUWGhvVV z_rE6=xl5@c8q9Pw-USU0U4Ifc_oqIFGMA0E35V~FBvmDtPn9XuT->IBJ0@G{b9g_f z=>X*BHdA}QB8cT0$2o)4$W1T{TDOYkFF#a-Li4SszL$2}b>d^0=#7e@j!z~J+Q}N# zb8xb9Ku-*AtXM9s`T@TnY2hh^jiB7!kYSOghNA&^a)*NWQbtG)XlB_0u*5t#kIP$< z^2Y{cLHP%;j9MhNZ!t~f%^m=mM(RK!kCZa{|v^QRz+G7IHK2fM{D9ToQnPtVj= z{=H=Ki%!hTvOc#(Zb>5yTJBbVYDkjw)CBxULlxWksv(1~AGT zcgkaWcwH}k>QytkL2!>TkVG_QBll_sqi4e6`-52_6Q7WgU*#| 
zi58f{_Uu-Vl@cZ1&w>HZVwD>qkz|_#=E9z$33p;Q&gjOmwtEbxn;qBM%7M` zL?%`rr35_?p>;6V>GFpdqTR1VvG24MgOCR{FpsceA!m6O{*>3F2yG6>8GrdGQiH#t zoSwFa4vZxn2O5PW_p7Yxu;)HDIXamHL!$H1;j$64#6?B}bd<8MqQ>i5x0chQL|_Ze zFeqGhY`CWp8zD#2ka2h=foZ^9@d?+jfZhRAVLNm_fr<{m&f& zT0y;|ZH(ig!mqoiW=2bv@wzaPi z(BXk)FMiq08M7L;4(O|X;i2;w@q0C>|45PJ2f*C=8IXhrKkwR{(H{{Z!O||h?SqL9 z2-ky?YAKhL0wIw0U@!0N;S-1H@W?iMxTbxD%~qC~uXKdWNK zU(*}B0}XS;DxB8IlW^LzMsyX8Uh(>2D_JD{6Iy_GEe@=L)0x9j4ST3*6{*XZXb|W~ zbBxIeSR9*)ZQd$t!bMIJsXka^Px{A$B^bw!(rI}2Jf@TCRSz%Sf83wwFl+SmYVy5Q zGhu8C2nh2G=k-xy2d&gMU6+nU%7$A6slyv|H!UMojVX{SnSG->1^Y~3+35(RsZSe5 zIt1YJ=2`)s4z)5-^py-9<4?dmlSv%mE_2al1BG|sPb>4C-f4=W# zty8`OYgc_1*gvbK)7>Q3zfReHJwQ>n-;;(5xE84@ z|2+eFY+P<`QD(!5umb-{ssy_I!>uBvg* z)SGV+DRpwZ-Qma0!MSwX#>MN};lBzGdHcBjkOME>?B}y4GJV*@-Oi6i`XsXYsla-t zmI=c|oJ!c|{X34J!ydmr;*UnaC>F#wKw<+)ehp`<`Xl1|)`9yEf&6tN5TCJOq)CVE zp3P=^nRTeVwy8G1U^|1&9ZaD~9KtGKP?Ltgy#NPn=iSomfV=oQdpDO`@!dckS!5ykY#5b`EqE#aJioVfrRb)4o%DUc9sgt|KMp}>7@1NG6u z1zR=e7{UkQgn{%Z$nX(bb3H@`@g=Chvc3CB4Gf>Ou`nXQW|WU2#b}a#%3RRd+&wAs z;24FJfPE!&7o_Tg92`KYM2)P$E@Pb!C)H27g1-tlf1 z8;IaFO4919`F8VJ>yRlu_m;U;P3(|$74R5(_nG-rQPrm-3kzU%X0WuOgr(1Yx(`A3Vz7hf3V#L{Khgq-fz`;L zCgkqj?Pm!M=IYN3V`WmxiYzTu$YtwFCS4nTkv)c0kZOCZ(Ga!xA1)h*b# z_Gq|uhM6_u)exzj<%`}hUvU8W!$j#CFO?vt8mI!+DX~yI0Leb=6WiU?Uds&ftV6tF zigSxBC^+K4U_WcTcT3tP`ZLJshT%>oW@MKvlykAbL&VLoj5v0Do~xkDOak?rUFr^M`$grRNlC& z=S#7CD&|Yq7EhI8_@2fdDT7YXtQ5uONtg0LtSUuGwpqt zB$Kmp(>2<$NGqm<qor z2{lt^0Ckmks&oNjgH|fkVSz^R#sn@?sK-h6JEq$uL>{5&a*;|M*grK@z3td3O|!i( zN57-KG^-%^FlDB&8xl_s6e>-hY-j@VSd}Su-ot`L=pR4>cfBv#+2-@+F{MPw@2?-3 zqhJHJZvaN^#0e5)Sqd~VRsqJjyWp9$Du7HKvE|pfVI08WB5lQ!-9G(NFa&{>Wv=_e z130x;RrdzlaAd=!^Qjg8l?@3sx4geBL$$aURr}gIzAlubyvnT_B?zdt6lE3@K@I0)NHCEw$wY zUt7s)#hAVbWa>EWShK0b8Ms%Ek?wb_kF@sEiFF$pWj@nsaMHG^snjZ@VY&RFUMiDn ztdxq8Rjxo@?zC+_bgTCM%&W;{dFfOY8;l8L`5QQdlL`~h=et}M3C5C3ts)ALrEzD$ z18`Ecahj4UUX*6*0iAkwp=U%ja4nW>C-U|`86}>;c041hsY|D2PduYZEPuRU4tV#9 zmpO5jE5-{pfpNLb{&wO$DuUHdT&g2}6 
z0iWCmN@kl4V4*uJt;r%{n_Hvgw%tL$;YyT5p$eADOzayv5qq#Dn*Apyg-JaB3#aRQ z(`)r{_AI`B7Rh_7#;a6;Z1z=Y?r+_wak=<^y_zVe1lC_R{8p;tNs!L?+LHea>t{c2 zQ0?aM%Ab!}z&pEOcP+rKUO{`=(TZ)1+HzUX z${dj>NJC)F_ac5_|0noyr?n#r)CLB3sB^_#KWC+%Dqny7!W*_|puN;q8CpEfMSq{J zi7(m~d9uAVw!9D4L>wD8YcmilL<+QunJH`;NZVpN{~|`%`x2JR@%C~CR5)plj!(7q zlPRQggtr*a7Z2?3|d*ODT&q^nw?8jGjse=>}`LfxfJp*DD%s*9&HxCg?z6g?cA z$%tjU6wNuD!I<~XvJ~ARXeuA9CmoE5Z*L~g{HOr5Zeut}69cp`l-lmrZC)Q>62Ci( zr!vy|gDqO75RCPo?WAT+t5y7E{1kWkwXT@*I`_8VGcI8p-ypgHW}nNG3C+ha;??2hzlg9&VM zUd;bCdSd7nPKo{}a_ZV-@OnPd-oNXCDF$l}{g;7&gQO*U}*J{;$@4-t*e7vl! zl2}kGnQ=La06Ua$48bZm3J{@{qi&8HhPdpRQ-kJAq~YW_TZG~*FoHZQxNV+ldkCpm z1cCY7cKeqzxlhx)IFK`m`UoKc_VS~EWg-f5UzC`BCdfr_I>lk14eCa|TgS6w@QYV5 zF?>x75)$^i=1WpF?6 =YGIqIW~2=D-JLJehIC59oaKTmSUHIytH$?r+;85&#k0D zMT!)%u!hV3le2Q_Uz0QKN`BV9RGI7_TY0r|EG((*f%?Q4_KD@oiDeA`_5SH1 zY#vVU+{D!4>BTH#QWt0|aAhx9E;Z1)*)ng=X|8HOQrqO(FvT9K{X%VOu2qkO^YrjR z!V_l8d1{#blr)0tK@@q@;tqEf7R-G)_}ev$1tbeW=XcU%f;V9xtG)Ah%@RuI0;s=T z3b9y6W&g?PIJJ2DXN`!|M0IQQd161U<*Brpn!1yMWYV%{A?tJjs|kp7RI?U>FP7)8 zx5_G$G!wa!Uv9Y^?9xmr=O1!(moFCSxj}MS<``v;5Nl-f8~1$Sn(=b&hkSXKXl1d| z^4eF7>aw}Y|3pr`lFG!t-BZP4`e>obTe`5qGK5AYL`CAHTunoUU2-`~s6oTt2dhGP z{&NkDYOWZ}p(4y&>IAcJ8vY7XF2}yF&y<%S8JCr6F(}lcGB~*64_%Hw)Zs2s6%fo+ zd8`ft5)Mc_8Y6i&n|r385u2w>;VR32!eh`~k|m`QQJkve;=zIq zpIcVC!~lKp$%?gNTR1aC+?5G8Us^EsYO1OdvgfvZf@(Jny#2d1xFWlpXDF7rps*AL z_@Z6N=hR@>s&9-nt3J#KhP7?Z3Xfh@jYF(rq)k&^(nzHAx|uy973NV=q;DNEEv1~d4#b%hAqb}3b3DU}i2Xw&~} zQC|E0SXIUavDs`@2g*QO%S+*4B9YAWQMiK8P5$6b1%9AKKvW#qjaR4CJBSTRuJD%SKjAS5DFte*KuM%6y-qCWL#r*T{-P1P zV|31Ww=h%YmXnBQH&qe#%uV`t4dk*R&BDTROe#9~;PHK6$sVgPNz{>6kmEkXvLX2Z z?;}ZTk>*CM{+Ix>#C7np|8O+zy^J&%>;I9N^T^_0;VUoSYO^XhI3`(c^{k z845aSEfJ8CNx~rWTQDSR7GYhSh}9sYHKF$S-%4TK4!y9PAU`*{M8-8k9P(PlfRPpI zcix}=o%h?Q7=ykAE)?YK7?wlD)+<1oj)w~8w@Mba5Zbugq_CT1@kf{jV}ziWIt>AW=6gIi$VO7m|roRk}><-ajv%?asD zR{*F1J;hq-uvkq1o&e;Zw@!UuYCcp31(xGeAC-jr9a!{l}E zeAd(i{zkk z{e6@aEOuvw(kpgnBv<o5KP^a+!Yr+&m7NtfS1uCB!^V|SgCr`Z^j%3hc{Dk2M8 
zASX6hc?>`IoI3M&FOL|H()MKEfbJ{!G4X-#b@z^O|iKVR0jIx6pQqZHIu zl=7$~4k9jIgR&-HK!e<>;0FVAvR66Xd2kZp_haA9b|SNi8jOoXLaapQ?MN&3{Xcrv zU9}o6OIkYJC>G@ERW%Cg#AvZ`>*Oi1@!ti3F`MM^ffCaLX`tdTbMLt$RUw&NRZSXE z$iL}^IyV^iyMCe18;3y}#(m3gcTjb0k?*Cx1P>mp&F zk(d?t8%-k{ZzUT?15kk(v?SZOHQa(@Y6Jl2Im}%o{DpuvieM`8aqH{&Ps3DIdXaPNhakl+(QWd)lLQJA9Tu=pK zM$o#e2Yajk?_Ly*MAAGe*#_VS(o>Lmx-?y_Jjb;#{()7Q;J{@ROPjQO8_?S`6_(YE zN0ZO<_tj21hNn~@F?oj|#^X)s9;-P=T`vmn;h>?^eny;3r?b#u)d5yC0D0RqBavGK z#deWzMx(XI6m*4`@(!fBezn4O@ycjB%StkOC*9Od$3g+asskg>7gmBDCrsFbcsBx+ z2(%_>0zv%*)7Vs;wKLI+(Y4UneZe8IZX`0UEVO2nC69>bNnHSH9sFY42_mqc15W`y zB64j%F^gfZT*@TI(BdB4vp0Gt(Q;c5!Ly+?Xcb`xo=Ki`mR&Ra?P{TJ;P4$N+|Pu! zAd&}LlVQoNbE}8LxVO6Tv`K>1;B};OYVF#Xda)toB+c6?x^U2< zkgAY+@1ds~`5;~YB@qRJ*Zr^aJWdO;cKgbv8RErPA5PcBC!1s;=LIP$k; zq1ECRy)b&7%f-hFhM*yOdsNGJq&9_|oE-W9ycUP~_}apR)h^8JMh3a5;CW5W^-J)oPq7b+Q)nAYV>pVw!j`0edaw|o-h9%zwz?&_JutCO}i-Ugmp!=C&r0c zfc4($_uht)d4>L76(eIH+iJs!^UP-G#(v%};_Y~N{(l(cDZgK6>%R@}{||)xn;O5K zX74}t0ni8fXN&`~6P-jtO{MnymB2R}^klaR?D}QKooO9;0rN4RZmW9)Gw)=z*MBzg#Cdd8fEp7CI0JnKJ% zMWU_uTK8=3Qri!ZYfY! 
z3o(`}z^W$b1=Uo@eKxvX;OU_WxdVVt&X}%`>;bg$Ytv1nHns;aqLrm*`f_$7Xi_YB zAe?cnMie_>JW(Tz6JmP;kLV*2n-P%33LD4TB{9Q_8Y+>RsMHkDVpmWg23uf??Vt-b z7b&UP%jMKG@2pV|J(eqExRtUR+cH<4Qz*KXp$nfSRV8sQ8!KZ?8ug)0iPlTNs030o zHm zRY08*t#ENB?J9aF+MpU58tpu9ZRodL{T^!{l4)V0oQ7_tImaf71tkJuSO5yr(JYpZEsK&K-QkU_g|YA^|9fD6Z4TUUQY zRJeAwojNRBf%&)TQNC?~0E6rMd-aX<|2k{p>xsFB5CK!d2F>eYP`bKDzx8t$tsQgD z4V*w0N231q-sUmOHsM?y!`o#R9AK0HUkUPhVx#JJ;lyrMOg}?2sH~ZeHca9H}qW489gMHXvTVh9#ic|tXq7r~2;P$J&3LskBqkcRV%Lq7TT}nm_;>0Y9K33z{11us6ZhXw_N5B!oHV(+iEE zz#1lJE=h*uM{!mZ??kg2*>EL^^^^bl`-Xv*PhkbcrE~i9c>D!7M#L25_`s_RV0!;p zMxd1dU=tAy{86`e-(z73F7A&;6e{~Y-DL9-6SOrV0KtF&Fz1V5xDqv5($5MlOJt)W z6CIp1cSnv(%EyKp8PRl8aLq^*eNP6YBFPi@mn8@=E#L{h0lB>1lMdsxsiSjDa!@eO zyuCGQ+i`rlRx&qYYzu{@GtfKVPmjc=YXM=27A+VI<`_ze9*maOmkR^#Mw54brD9A3 z<|{U>R1<$l4`wJ>j)=fgvg=_AcSxHB!(^5DeEdW+AU~aycJMdwHz17xWW0k5Z>?ha z?=tSnr5YV(Bw*{4wxd&KgrW8BD7fIh$(@44$uPrAE}~j;2zxrBA(;IzLDongkvaj9 z*c!1^phk10Wi3qDP16HgU;HCmD}2s*)OVgrdlvU2L?eq>G>q^X4$Niq%lX;p#QEm+ zpxJS3&aH*JS1H-bvRyqh24{pFG05;jmC#uUKX6DNav8FN96k%q z;90%4?1P6l9k5q~Ierh|KZ^ltsM%Q|f?rv3rQRx8go;Z#6-<1H-b zP0g)Wyzf2`{P}=!K*qHHcG}x0!WPd>#A?C9jA6>E*GxPtgBF(OM5ei6THda1P&)F&UkIZYk!-dVnwy-q+ zi2Wng74YW;KdFiRX8l{XNSu(aT6VITXUPCvp!+0{s}pO{g9{nTAbxdkH5>+2VD`>l z`1wFV1|a~++=A!Kq`XA5Kqd5%$Qf@)EP`hM;M9F1`M5);)Ii-g0XQIKnN{3UH3kY= zZ-jlm4t2=;B&YFC{MvC0nWW)IA_0R|V2SYZHjA+(>aDl);<<31gD4JEsBG6WQ3il~ zb~_iWAhADPBD}+9n`rF^pN3@ze4BY;tL$C zlV*U;`4|`jk_bY~qoZ}pU+XjoOsvNV6KHtLi@NRNX7NtqfnedqbgIXwQtfcnr4qM; z*pWNf)K{!6P(uJnym^?QIBJW2z<{Q!N@&$c;-&b(bivy{VTD+P3&6nA&>$S?sS zzduBe4xPkxl==Kv5a2#Io%eTPmgo_GJvZ8WUV>-7=%G0#)3c!A;5d2!)K`mO_-J`U z9FkUH#z{eMKDbdgw7<6&*c+0GEr3#Qiz*_cPh%cHFs-n}?{T>^?oF1t-eR{yCdWcj z05;@F2BwNSCbj*ynW?h*QiPgQ)9}HcWeLtPZ`6U^u20qqcdZMKBf`nJ zuFZR?)6Lwv>>gqox!>_C(#PE(3~&vtov?%X1jxSWr{9zj+!d9_fcjHt1n0dEwRZ{G z$|(4jyRaR|eJ79QGwkFG=~S&+k$4W>1`7MIo3wMFOZH+$Jqw62ta#C<)yYfy0J{q? z!Z(G&OFOXyPtR3nv+okR&r;*)!Uq`h=GCQG4$|ImqhGvaAf(Nbhl6gE$4GLxL!8H| zb)6=62N#1by`L4cmr)CxSNCRdm#Tt|49pN<=^LpLdRO#3nIP0z1^J@(|CSk=T+GA? 
zzW8E=5vN2aWBP{7D&*o2(J|E=tB>J);RLywK~4~G#&WgJ9aEfFwxMF(sZYXU7B*MY zEhlsIpKeorg;yFJQ$n4EmG8BO!pkVb0G$fmP!aXASSy6L`T>!eqs+sSG1WLqm0JsI zl7@`;R~BM;zzyl2%>{U>q(nnYb8+C2XtqB-d|$TjRBS;GI{!AC!T(uMJL4K!`SZGP zKHfUFE}(_LR)k{1*~PF0JNE^$PAg}c3F{YIOTM~LtY%0!_l34}2}_pjOBv-m(}unC ze{|h*aAe)P2m08yCpJ2^Z6^~>Y}>YN+qUgwVkZ-O!ih1r-ycq$Q}^D#y1Ke*t<_bl zyY^oD^L(F<`1AV3y%h~>XXoV1+|5B=6?f1~QJ25gmS+TzdNM_5Q|?9v{$u#C=TBo{ zPQFLs&K`hv$LyH?MWyq%St#(qu5|?eb2py17SX0x0Ne52z9#na?nTj9JbBcX%_X8x zH9>8(YA?(<2`{my8B5W$aEl%Gq(Q=VQtJ3sR*q@=%F7x!7|EcPT%-6HX*Z=RQ%yV1 z6e_lH@O&oN4JfwKW#E;Nop;AT9><;*{}|pBGbW)3w;P^(JNy0FW2Q34;;*VJGIznx zlP!s+?*Y&X2+M#~EWy#D=0q&k)JGF1-s3Si@5y6PQO^pz*ar)A*7`z0!-gOYq>WFw znbFRpbM+N5}}zQ z;Qxvtg=h%$28Yx@fbuS;9?C_oiV4Z6w-eL%0U{6K6P=XXPhVQxKW;bbKE#h~7Al>* zSrEqE3E%4Jrh3;=@X_M0?e*K(p)U}@ZTaRJ3E~Jrvn_g5MS8ZHD(czY65ERdY&j;f zt*n39Mhdr<-c=A7?6}j@wKl^QYpb7Vxj-?DX^k{BWUHmBz3ykO%YGnpUgK1%OfWS- zM5fKDGShCrNkg3mmprpyMDsGm2&i`83^t~aL^?B$H9AOf^veVbI!geeI2oxtI3hhk zM%NH?m>el&BqL*}&PYKuw++9x&2#M*8ihChTDg<^Iij2-?9SDPwag;2BlRi9Blgt_CBF8H8`!2`8RyZc#Rl)N!e9GK@%Iljcz~~ z0IJ;w;-#BIzyd9LU^jG9s)n?SyRxuE{U?7nFc23u0`N)muTxo9f+Kg0@@a%MvohsG zCimQ*OD<__crl}PhYhd^pBTG+lts=)4x?~FT@*B|OJX`@aM8||VDW&5q9%wde&UYB zA(skdE-8(NxZ=o&A;?LM!*re&iKuk&_y8IHMWMioK^oYw7C2)*bUsDOOtl9K5Whh1 zGBW%|I8IlnVPv|nk;x()(5itap}P&x_-$SHOCIc67H3}ZJTJmTH7xlF5g3zfE>GN_oiiZH)s z?XqNmn$Gf*xTi;znF%U26PFU-XP6a{tc~%*5&A+B}k8@Vpuu+-kIr%g8zLu*hDfu3OduG6@^I6(7)sTh|}GF zLIe>y3mKZ_Tu&a!1UM=*OF~&*+v#xo+toPf@^AOUqqmgnf;B^aj+CgYh^10p=BC|r z$8WWHRcb$LDzc~#gasq4ZImb>uJMh02u+0+$AoEfR;jTxzQa}E2Wn&DR;{`&$E)M^ zJpD*6i93jO9KrmFIFmM{i(a@A>jx~EuMnrynr#lQI@c;w6@OouDQgTJN?27#PlJxX z&*@Q=&|K=#49j6m`brDVZZZ-5duo29I>8>At61FwqMAPxWc8;%gnHE+RNsm_`dBf1 z4gBQS{eAGZoG!af5XNN>Ua-MVVRGjnLQhCqDq;kl5e%dF`(572RY@9R4@)b%$-8(3 zYiBr1#t0OJJMZLRS?gm2mxh>nS$NNcldVRn5`_13l^s5gC2N#cdth?vGk1Jn( zDSop_3(i+hKiWAejbGB;5&I8FKEhp*1se{#Vk$Pr95^HD%?^#+k=vM~VOBZfHy%&z zNmv((IHIQpCo((Ki0TyVxts?){43j6C}-q{^nPaN^D3nC+{;2==47k=-V1jG`-8O* 
zw3D>?cW++vDzVAKlzHn0`|kVBZ4rL4Kp0iFBa_CRq5S^WPb-eFPH5K(fWsxFg7JtV zQBX#)7RwXYgR_ll7&qail~FcUB(b44SSPAPAQtB*Mn1A1+ES>aUYMx#5tVk&u^hbf z_HVaW+d($C@!sl{Ry%jbC~j-l;8v@e3ceiYNCrWc~&=S&j z(^ia28;^nlY;w(ZEy3wV{b{*L>OqXYc=7h9`GfIkIwB)a0R?AiWLsavWHUmB-e@1t z-L9AS?C9<$rGGZeSP2DiqfA~f40uRS>xbX#WUw>rlyjIhDmz7_g!orUT#6&f6Y-az zji$R=q4^SNl&4%*Cn#oZZz18-BQPqjo{aBKQeVTu>Lj`^*_|}NBvRW``VACwdTI@k zi02}C065Y_Aw-#E4g0rteEppoy8p({4gTfgA)tv?A*Oy};elyrYz?yUDStIG zqVD@A5N&@4Rj6Xi))>YSuBYrrlG(%1<$_N(Ru$qPyJu23_>7~M`8f)GBfFcF^})XX z^bDGeBmR_vo5Xl3CYGP`Ofg|QYCUb}KdLw~Qz9|U)+n;-MB zeL=tfU|?sWnQVHQ9qZdjZS%<(F0#WGO&h==lGmLW>qz^ka zM|nQ1)i9t79$M&eZZtHc1Gyfuj`WOKK1d4tiCIQs6LnQ!@OS(0gzPW& zvU^7+tBcKDL(fb+NeoGfZJ<)dsCHD_tglC3VNRc-9O}CwkrJP+>qi3XK-3|P==y<^ zwYRuE5>`<0toU6vv(piP-#_0^*%ze=y%y`qB*Sy!E-4AcAb}KGTweWf1yZ z057ipo>;oi>dA7LusWRGWfApZ3jebt-OHxeXQ)Pg7$Bte7Z8q#?FCARs8)>Iys_A~ z0wQ8Bf3-(rXme%q=sHB7O(UZCQzct!s=V}&L{Q9#4Qe$3EEfK_s8l>SY1UyTrLOxF z)ttOA1~mjlAKXBo>P8yZBEX~MR-)}DJ#TWvzM`KvG1+SV;wzl}P$hFN zD^^XPB!H;-;!I8y1xhH3$n0{UU>Wa}!h`^H5sU-BM6OjBH=+UWaeP2pyrMa6u9F5n z4i_e-;t3jrKPc#@fcdUcjYZa&EU^aAV~NUB!W;l^6@~zszo<68T48$gVyRM4p(!3i zwk5c9hxT)OGo*;N7RAL)#1Q`A4D)%TMUipYkVPl6%SqGLK6jyv_{hxK(uxriZOcK= z*%mRxJGgS2nBY`DJ~m842oUk(FnbWa^#~%sQA7qh!tG>)a&d;uLL=xQW(rW>PHku4 z`9bn=eaABX5{b0vz%Ud{dRLG|EEcB65*|m)ImAMn197ZDm53!m1k@H>s+(-m#F*BZ z)F&*bHH5-WR;A)$!H=sLI&W8D!4IpD;CqH(MYDX$@Wl=u2q0p0b?Gh{7^>yA4~iJd zB09U}QT`=1vqk5lXur1BZrjis57lmaW#3ov4(+o1;SzSf&6gimc;|fz31uZ0EmfK_ zV3TmRM;Zs^n+Nwjs|k~r))2LBVY7j_I#+!JrV0QeKIn`yXlw#fDs@R}KMZ>PTjGU{ zV4$KaV{30Iqef56n>x7_wjc?A&y`PW-F8?8!CyAzuaA zlBtAvqUIcHi!ntm#7{SVNzB>w9s$T29_s`#t`oH`U{Z-S$gVZFPBkNNdjCeWtf7wIJg4%c;x!=o>=uDQ{XCqz+E(|H~d3@W1b^=NP&ws`7XDpJI& z{YiSsplsMpH|HWxYzY4>bb7s2Pg|GXd(V2^w2uK+1&12@M=@qG7XJF7D{`e(?69o; zXxB6T$OJgYJ)C)V4;U-1s}}n!Cb6jS;pGD7>I9E|IEQp5mwW1X8J=VSBJ38~692*E z=8Bu|H~z@>48wb#nDEPX$9(1~*|(9GaaR_NcS2UsxcbpNamFUPh=yzr(4Q{y1@r7T zZy5Qgo~s(RYe`j7$E2DH$76)PcX3n#v?^=hqg!L!ddj*8m7=>3jn3M1il9>pzpDW32@JsXJT^!eT&qSmbx73upm{LzuyuG 
zTCKWpnxEKkjZLC1X{C)*$g21KBU?MdgP$ax1}Ylrd|S0--4?lO26K_9P4zA!nUbt0 znm?FQeudU1F^s{Bwn#dnw`QYS`7JWbI5J%O22paqpy2yZYmJ9k7t`ZV1b4V&1306I zJ3EvVu-!el#y z$IWQ231-TbOtej7&MC_6?;&f^S4^27csnl^Ut|=nJdQTl78v+`G&Gi-bS#_uA;%>9 zH9Mi2cQ_gntZC;F_kfq4=5oK^!NiQLe6j~8U$iz|Za85}ePlxWT(S^%vVBHab;z$b zXsvxyFcH`_Wk=phX_vLes#jBSX zU253fD%rCbp6YaBi#a)nr)Ft{>b!+4zJG!X(hi$j9lTgueex0W&GGZAy9J@69|Dlf z9DrV~>T*Gv!cM9M_Lc&jTobys$4Fo^Lor{|qLgtN-|E_~?l;88-kT62 zRh1v@Uqy>va=CO|(9qww^ctWYD9jOWe`rt9-q%r2Q+%O+nL}!6u41@aiG8>7Gv;aLYIzL9@EL}9ky*H>x zBVh5vAt#f@gcG82UNy5w!J11MjFvZZ7)&5cYUsXE5?dMcMj1-nvW-Vrr;9w>IUqW~zsIuYsoc zy7~#EC=>&RgK~pZp5SL1%@VeOMTcsdo31Q4GSP$)-`% z!@%?El(-gdC+_r9A&-IHXhwEb5LPhHRlXVdnr6pvLA!xQKBb8u2bPlYlGu%TxVaY@ z-?nH)XO_kFiS)n+GvoDl!flRZl245GtOsY%2{R{&Lduc|!C1?wD-fya_siuGvKAsu zF-Gi3Ke_>0t**i=XQZlxvLMB`!me1h!z^KMrz@h z5ybd!FR^=sn7Bdpxt5+msLvI6I!a$yD>#93ATFNWNMsYv+ZpJOYoZMNc2G)$`MNz; z9s}T`Bw2ZgN3Bf*XJYJ`!tzC&zGp?R&Y`+{STr#7R_26ld&@$8o)S!S%@FN~)q!5x zVAghyR$$hD^&cT@WmxIpYHG)4sjCh$qOJQe_tWTiZzEsN;pVRMM6?{Xz6Z1!kVvp2 zDF=V8oFbKmIAWy^f-gC3p+aTM(PqU~)Tren**PUwZLN}OD6eFVNr~C2k$gzSRno_o zmvukhoq3phgh#+lZWiAnq5t^F>}|`N`XHZQ-P!)@U$srPRKwJrXSa5-vVy%PFnqz1 z8XXE}2p!?*LDH(^9Y;Y+kXR}4kiUlsog){z#aW-k*|D;+AtBQNXf~EOJ5|{c=npa_ zF?eWYgCO%(Gb=5Avc(}1TrgjHZS%kHFZXxq@Yu__YzPRDWCvTtG|gcP^8?iBs0g5U zj!I_`v}3#o{hoTKoA`>2lzTkrb%cG|Fn(Iv#w^dawDp3q>?H;Na*7C`-?%;Z-1=kT zSk7_UxEbAagi5sBcyR1oU>WPqVV$*OHZH7j7Fd!>?=G?kP3z3Z+NTo!$)jHGB)OR3 z2pLbV&zO(if4E+q_pU;5J{=86?cbP7iAzn<|ZJ9JDbDHZEH@Y3CH3e;{x=_t4u_wUqfKK8H2 zzbTwDEOm_FK4R}^ z#;d{i=bs_7gYj@%5vKn}M>E~GamM=bc0lB9T@I);YZG3S#>(Hh>C)e|95NDu(B~k` z?*QoHKivhW_jHfXGuM z=0xBqcJ7%u!V!c{OY%-E>ED;zbBaSLOoc5Ue;>Rs&Vhu)kT+hvhyH6xLw^+H;#|#L z8c!?Od80N!0>2Zj6-;OGXPcYo^eDHGn5T1AF=XLepH-361W?PcMgo+5ff37-MxA^g{JdD=&XA8K4 zK@P0z2Gh%fnet|=myDy!8=*Awkm|THNl*3!mUFHI`1u?UHh=De)DI??hf^sKTyql~ zvU}YBxgCK+$ z9ZV|^X5co=Juz;wcQ}zgh~eCvwjm$0{Acn|rp?e1PXL+QkN&|#s$jCY+7(Za%DXZ= zKF<%2`|7UYZG3VXWb(>Uqy;+>{Pa<+^MBK&eS`UP2wqU|U+}-pUXYbS)BfJR{1X<$ 
zNA2?;#N^o#9%Q`Tqx)^=PWBUj-y%iDA`%Ho-Ozgc$8A3cOQYX8)UK#~{59PBddTVQ z<=P)6zSVyFAH8qa-S+x@eh(+Jm+AFw9KZaUJ0EveS1;{@X5T>uAWRA!QG8exl4>e+ zH=ZTm_>foo^`Mi&0wLRirQI68pnV>$JR8zuuE*|<)v;`B?Rv59WOn<1c|Q^K^+EID z7&X|!tqGIBx&?k45d4s(d*MaSX;=HcU#w`<)7&u8BTe<8&`=?FsT~dK(>@rXrcYD;G{uj#}$Y9pv13J2r|A_1CqTJ z)>RZ_{vfrg*~%1)R9RMn-$QHx>|b!KsP%b~%8?wD0V+7bkn5jrC_j(_8@I$I9HC!( zS2(?C-qf2w+^iy@L)k+=)W1iNQ`=-t)0!FPfHSRwlaHQ>?*~9R{bQV-#2z)!Y|La^ zh9`Z1^dM;x&`ftyCyF0kT7>C3S!x97N~_roCa3|zZEf~%0orlN-Y6~TG2#WgVf6)h7!#9%snD$Y_8(hk^o;{&py6z$`j6v4H6z-c-3p}zO6Ed!5VMOVX3AI zOAh(J8ou7d)-hc0*Z08k=?yoK5<;mz@KyF4A0l|3JcKQ6t~_O>(W8~`t8D0c6qb~D zE4(bSmWwBCDTjjDM${)**!|3Z_LNy-oOI3$cq-T-!Ee{3h3}&OD1=@MseibBIVYJI zLccr;0=VvOT1*OuC+gl_D5M40Myb~(H#xj%>9(zNFdL|fR=`hBVUC|9WG(Q_V zOf$ok$zdC4!HyLd+Zz&Q4kZ|VMY2C|WDz4|z;0=Q&NUs4sC&t)VSy^4W^?3nfpQ>% z#Ca#aXy)0%Id|!Iy#+$t}K&5$bM< zvbJPqvIU<=fd(bR>$mAi3GxB({+*~mDnm83e%cUlce|)}ZoQ zQ-Du3Lrgwu#?umFrja-e44Sq9fGP=n(3M@Z$;>h-v&ttNp!#17+Pid{O28#NOB zcgc|U0L1me?mBuWrW)gDgpi3-Y7<6$?Xj8=`o5=V=98B)lt}@ANTg%JHqt0P^xZ2L z!LQb7*oL+wu>eCF|E{QsvaoGtDdFN^)X#B6b78MkIVj(6P3U}%nXg#LA?%~k2`b|% z85@Rw@D7{n8aU~;K2+(xqnZm{z!Obd;BD4d^#cE!Jb)LTu@ihX3|nx#AI%>!Vl?n| zLl|c>8Q~3j=YUX+UsZJ0eHp`coxedqX>qayL=Y77GO*~s>#wtO3W>a<#j~`CA(06y zj89s`=*n0}fK##`P~!gv^W$X+V}5hQF?P_q%&_+#s&Fsig1sMDNyT=vW~xB`e1y-6 zl`Ya(%{xX&PShK4)dfiwJ}gePf%Mqbh)p>8X61!fhWEuP`}z8$1O8*0Zau_#+)sHu zC~AfPu_hJLf}{lI4W><(z6MDqPQLnye447!%;lbMT``P_1r78XjHG=(3te(8leLaF z3_|igFb0pNQM#odZ9QphPcwnbo$hL@ZMcPqE z8g@BGqMxDoXL8V)75HB$o->Y?*C9Mc-|a#cPc5}uokVC6N%cIIi?@fQj7yG~Q&cWr zLy{%@e7sLBy!@;Jm6ku^qtf;XBK~QZH20zeU_VOZc)DbxsxeFXzYh%*07CDxj86dfnAOB@LtB1sKa ztZyCV2r&rw5s8%NpF8m}Li+?!;3~MMMB|uP);<2S3LidoQccIBbXHUMW|KS4PFjJLpdXg zfFDt>6ooMH2zUgOzcr^wsbJvwI0_HqkNh)w%)Q56bPU8UarhTKqEiYa=npNejp1jU zST-Q((+_Es0QOTi#CfBYsi!hg>L%O{!%%YD>Y??C5f<>;Hy9}L2Kb=6AF8{E^z8b_ zkZ_88=PCxhU@rtAv`MmTnA?JDH0_ZgiXETi8ttBv81!E-r}S@zL6MYpi=c`;o5g!X zZU|gy?RlR%Y1M<`K&T@>*z)6$_|Sqx8QnTodH{dF3<*_!HL62_9QeGdsau^0?-8ZP 
zwR&#Wd5czln@<0&1!AzR#E{#MGwEG1Dw({R)!h#|kWxrUGaVwL+$%mHx;s+?f~K5? zte{Z~oAF^*6;NLDq#J0mt5O4A^o`WkVF~O^G%#})9$noK)SyCw8v+a+U3bD zruj8*7*7?K|2h?07LA$z#dAtCmE7nFkK`wj5T+TB_>!N#C8r{$%&pOLuhCrkU9V`Q zh1No=v%a%=xzN?<6jj#)jGy9NLS4TdM|r0=KEFNap`Bbm_SAAc^6Nh*R4GLN+SRB+ zyAPUjhD;uRZy`oY%cNeoM>Y!PU#w2JznpF3(HqADVOCaMTe z8~06*a4SG5DKE-%;GUp5h`5OU7aP^b&#noQZ0^ZN5oVNO8bNtgwAp4e-FOO=xr6dKzaE=PpLGor4!)X8y;CC!-TlyB0IBrsY1Uje-zP zb30K@!)`1%yDN>M7js4I^{d&Me!_1QdBPf0G%L-*KozIuezURy4O26puAnc-s@Nde z>@cbplQP$iMLwOc`NV%rY>)NW-|zM5cy(2l>hD7}<==;1KKZXx;49dJpmTWsaymqp z%*koM=*fmKvjEGmYn1fcH7Y0aLq>h7jI5 zP7qby_&JbKW)BThScn2?@WtPf6MFGu4=){j#y@EiA0|J7PpD?#q-a1Y@t9)=37f!Y zhgBn<*`%z3n_LU6E$I2Aky)pd)1g}OdQCvk`#J_X$@+~Vbp!N*TV6PM?_>S#3sQZLXpD8-C{$^W#^P{ zjf?;ue#o-40sEKYC0e(#^;U5{lxlrA9&3fZB7`ppRge5pUnF>1mxOHo`P;1<^XymO zQA@PXCr|JoI7>_tjy;E(!d_G+3-64tRyD==?JImbpFsXE(+s=#uStBVEF>PEq}-g+ zg)-GX#yJ$;23E_e=J-eVv_Eaq_SN9^()QLh$*DqVcYa;&C}8+|ym&7GQk3DLVj@$L z`Hb#i8=H~NnuwGQC$c%0e{K*Qu+EyB@aC;qaehkdhALlm7K5O&X6oug$`yh1^dR+1 zkQaiSi#rEgO&Ir&w?ggH;VXLK%BoMWY!-51DIe&{IA#N6pup!3!=w^A8e~nM^fl6c zlWF8-sTU6or88H6F(g!&FzymfvMUTPo`?kHe-}XetPov~;WoCxZnkm{zLG3<3`uW% z6La^dTi(0+v?QkO{Eo<~ifd%ZLk$wQU!M}~Bo%IWN4^_Mt~#zIM26c+uDY7+*Yzu)*~V+xJ% zri|WAqL3Y+rab^^T6X0F{Ke$;q`MxeF@I0ElS8~&q1AzwW6F}LMCfmMt(!EjGc>P! 
z26pLj4Hd9SP?Q-#h2_06MWp#MxJFUX@GVkE>vzjqdJh`3;OzUxbEiQLa5DeDnwF@o z7N}`o7AsGm5A{GP?nov_S#S87_c3HC@cqTCDSrr3orfdoEK7qZ1*?@+*=Ism12wIN zQq6xf?bJ2SwOd6lA6|R$CnHDF%{r6M(;eZHV&k#vxL=~R%PjKtK4yj+5ZC^$_&2Wg z1mapNn`V5X4ho>H)e}|vU$*wozih3Pz{*Gk{Sk%P1_3QAk`)_Omi)LlItS}XfsvDn z85k+Nna5#c-Wkl8_D^xv)k&HLp?Uz+K^w{eIEv4m?uzOT(YK9kyVtWIY64>}0!H%F z=?|o_ZVgv3>~R+fExs3pP`QInRjKgp3-)$kOw_2XyRN;IkA`N%qF%V5yHYcnA84*H zThC0L5QvU{CVIYPH7uGo#xF5UI-R;KsGWC9g1C_KpoYY#;V}Urei)|z$2$2;x=7PsY;rkjaADm}F;`5(AD{&0SOMNsuFEJeTqH?l9Lruv%OI|d86}omDr_TpWGk_;SF*Tb!%%zdmX_m1O^!OSI38+H4$9hV{9PFQ-E6Gq)F|7+%9K?wk_F z^^g%^BfE_fM0dHr;G+dAZh<;UDSncx!)AvYMCwD?DPGgO=2VM7*s)3U;>cb4-T);j zLEEDL=EyW`_l32yLwVT8lWS=i6q%LrJfC9kVHlyppVwealN^IX4yUmo8wzJ*A!|CG z0J#Uy7l<0-z_JnnpD#{%;`Ji>lk@Y2<5;EGzPjn@j?t`K-B3gCn zCNoM1n0gH&lEjZ42W+dYxLC4LV_f|Vf53>C2C{fdlU z3qNt)NAaHnhd}mz*tT+Ta&Ew?@OntLSsS2{blp~ZmMdepD4t`8_MwBnvvwVpZ{g!l z`6kLHQH$GwsK3jImUdI&BS2h=)P2gfFD;Z4qOr4sNjdfVh8F3SoOYacG$`4FXdheX z!LXyoiMk1uMNUT8jm>xIkp~Mq$wG6OtsS*>)sFQOp}@e-8AUPhJU3di$QC|ojI*NI zt`oZyje8G3E_)F_owtRW`uosPnb>o|=*!PFSpI9LaSBZ-m^EJRFm&xpD;7z)G})HPA8sZEIZ{F$gqxTo}g=6Dh-N6Ns=B!g_Q+@`Atk5>yTr zla{$5CZWUu+|Y2(rNGERpmkhu^nsMl5eG9#MMZ^=MaKO6i2Ek6m8y(rBwWJ&wv~r4 ze5V)wb(v+PZxf*xF?BpU-FEWYW4IS3xpL%i1+y-8n2W};lP2z4+n60k2x-$SysM`3 zZfE^b#Sn{s4072QT#Y3mT05016@?v4XQZ$$TP1^LZj_EE)lZ@j-t8Lmhw3O@LP$`_ zSc6`=O~h*LmH6B@_U#}Kws-_1N&c2{BV;fejtifs zJTWiyLR^Xb!36z^2{_gjEEC&aF&CUW!^bYP~H z+=<+Q*?K%ikj~7eu;KYN*L;+Lk-0F(<=llnDN4`z%|UCU++0w&p! zjTNh9#}phr*R0+%EmZ0;t|>&S#}A0Bwzg%4TeoEgaJ$8&VGP?$I$Sb<@N?)WK1%+) zk*cId&X$>8Y8n8mgx<*_0T%`dq^5GzM=!xjVa_4loE#$z3uBI$|fh}Um7@f@-fY7*IK zLpha;pSGcgkk>>1Ly5hb*CYQ$iIotc#ri%EvyP6@nhDF$?2kFt!kgLO=N%q%xT1?@n%h;qZS=K(o+Fa6|v?bOp* z6-3?2-@W_8E#ZH58vtT-y#k2zJlPI7S4|{VVd^A>v zIy_n?&nxyYCU7)G^DD)5h7Zx;e4G4@Vy~SE!S*w%vdkP;05esSHG3=xR#aP4{<{GC zl=Yo}mbUz%F0Mora3hFJsoPP1e#p7N6_+(ETBc$fRH#ZR#V)2N^uLYl%95BjZk^wH z4bUH`-}`E|pDBObSk(kPhShCDZ9FHbX>bsCg)h)xrjA#;HlwR^+)lZyB&=Z<79A1g z8AxEHj1fu=7f5s*w{vg?BmZg-<|mR%uft!mC5NQgvu9%Y{e?W3li%(b(4V|=q=hi? 
zG@~cVA$HK4!)ULVPnW$9=#3r2xBunyeE;Zf|M#>1F)8;OQXCTUCMo4-f8XY>_{DlH z7O%LSugg_RSA@U!b^DrpA%7dL_o3Z%(~s4*5D)HB?YFnE>^GVw^lT5)^!&e`7|`jg zSc?hRaw5MS7he1^V}g!3-ngMX=~b;far%(mQCWJKaeh8harZnQ%W-C{B>Vs<1T>K{ zHt0yceU_l&chswp$plebYV=9XR06T%A7f*+aX9$g5|A8|3%P2X2id5XlUpjaZ6 zbLrOeduNOta=fu}t=-CvZkpBJF{s6(qm9={bgjAz`@HmsfL$Fz5Q-{17YZ>*M+TKM z?63RljcSSZ(|+_wnlJV{K(XFE40nmE=OHWl*43O#Ngigx(EpL!pC~*BSda!%$#!Tj z&QY&ocgpHXt28|^@$`Y=7cSnuH?I%$!ghw9gbL2*hzzcXy#IJ%GB403;7)-y99>d+ z&jrt< zsCZz4NGF?aff|PnQcV5)Am+gx=dM4h;U(*L(ZKqu!7eQ&@q*BFweJYSAoW~4ujNOXDO z-)GDW3p^|XH9a|{BD|P_09}RSc@F3}12F1%nvwxd%3_`(f$6<1f!HR+V(#dV3|1qS zs$ujFWst*#$M;Gsyp$qcB`o)AZDOAkgrShJe(5yba4CId?;MNEyjuSL}DX)lEh8{1XU#%~@+uCsVU2gEC6xgb9uCntWU&!Gt*pA5OO zcZPvR*#Q*nzDePJjK|6r;)e;{LdW2#Ud-BQb~B9owT0)9E?yo54&Ek&z14+HX66^G;V5a}nB9AYB9 z{zE|j6dSzp68gn{r`{uyku9MHj#!Z~l;11Nat?$8#ulU71L?x16l*qPCi|`d4%5rt7;Mho z+?c$w2HR{?2E)%UYfDO#pB5MX0TZ!+^IbHGn_)R34U1WtW0+aBYc@|ZCozO24$01A z2dCbUml^ugn0SAM>^un_#4kxG{D_{r5=%vgw^b2+>O$fFrG@t{{;w8J_(uyvwB1UP~R06dy+|Pe$;fpk&7S7lOYGL%@^Z#n$oT0Ou|Eq9^Sm>BiXQ(Ir+qvnes6mI2DA=U2!^RUCt=PSL$(tyw$N zIsL6!Ke`!OPrp!Yc25=Czq9liZZk?f~&(# z`SS@)%{UDK8%Ozn-UxrF>y05wz;Z)bWzM8bB0WcA4r=;^gaOK7HaIo{-YA40bG!GJ zZt48cbx$>mI>U?Bny84vqSU6hszbo|vw;~-7-$iX^<0{ykF$`c@11*mh?SgGj3-Xc zwrl9vtwb-{&Y8$EK_-w>qiYs%72>!5idNG0`g6vh|6mu0V>b|{vu~25X#kbaSWVdv z#zjr<1$ zbb>=G?@$=CN;an5ylY*rm|${)>~vLy-;1n)#pvSD%Wq?Kn%k`U4W@#bJd&(h>InIx zBRC>nfB6yPu8r9}9Ti*n0_EN(C1+JaHK0+oJLQEWQ}T-Gz_5Q9$)9#y#>m(ontfxz z!_XkZlAfE3`dzFlD41JoJQQ`ficukt;TfGw=MKAvb4~L6_X&x!Yx4&lF4VYu` zUqCGE*&od5%H>bA$Um_WgRNbd`NzoL1~i$S!{yxd<}N8jzW%&J%xCHgxr`RN%Y*?m1lu{ z>Ipt|D=m&|$`cQ=DHNLI0RwIeJr47{#RcgQwNUN$$CN_z;7XW|x7qn4%Hej3dCrmQ&?!-Y zl^~q-qvd8ky@R^z_i zjFXquschMr56ZHWcpH|2eOVf3(|)MUp|flFk%6PYq*=~aExnD$%jmi=k6wNB*E=ZwD?0I%t>oa{!^>F3z05*hi% zq=0{jZ>LKh(c>rAyYFz@=(jLo;U@g0uZjl*xS#ZL6P7=s^PVyg< zAm3P=7wa}LNFl`K_&iTycC7BRaFXtExXeb6N)+bne^nCZLwwDxrxD&;b_s(3I$<-2 zKeF3}`OD1I zG8mb7Tgc^AHlSs3OxuM)jzqDtBg#^BM+5wbi00LRex5Rs5*+p2`>=R!%FI!&fTdwk 
zG|poA;#c|o#lwCFQ`>{dEW_7-T%6>AhX?HJdAF&M$KlkoJ5m%hAzM;`UR4g}n7zo( zm;W_O;JO=NmYN{o8qo(MRv7;a#zDD5*|)X4*Li_4Wj zwmGm0K)-t^$6_xRxIDfJ(J<$TBi;_QY~KevCQmzWTbq{3xHu$dFX=GVO(_^26jD|t zc#!v~rH!L5<(_(|b+GpMiiVo}R`!sT-Z@Q|Y|WiIFZ;Q&ymf*CQq z=9;O`GIDkIv1%+*-o&4SB9}#4K!Kfu(_r92|DDh-SvE$DA*-?8qY+=pmml)uW zRgCh7YBdiF_+%Kq0d%<7>@V)hjtC*=bg1_sjGwMJ&KdL-h?Z8siRF_Ueeu(7WL{vR zQK@oH@#S;^5wx)KH1J}6vEG{+KWu_Aq)qp)0S0$ySsR_5f|p)8y~~S(zG>5>q5@Bq z4x``Gsg&LEBayL*$`F>*%?7*_Lqm-wDv zQN}wxI|aH9v!M=mO2q1UsVf^y513wl;AehsbSYjMTrL(GlXUO&wHbOid9;4RCVKx> zd!ujf_i+E1&1tXS@6+1)`jkCe%b8u@_fEOqgA4-l|5SC3?Ue=VqK$1^9cRV1lTJFe zZM$PTEAH4%$F^o%qIq>Pi$O#y0Bf_{QP{j!ua3ekPr9wQ^2$3(}Bs;$;HhM8`wLX-m6)`z-~gK%jQ5BA)wvg z6m+&?-F(`RyqSgtD4IbeAEdn#cI1rYw!}cY?D&(FUqb$S|G;9yefdKB6#WFROedsZ*2h*KqGFcJwjnO^`JKVIRrg5TcX7Hk*!gxtMmeXli@oK3k8|d#qtj^ulw3AF&zVHfThiwrsU^ryttc-LRD3K1(d-$ zC5mq-83HiJhw1I#;PSySHViIWixX+8wk*hk`es0JGiGqA?s~9I9(mp`R8ytU>vsO1 z^329;+}n+VJ{WrwD(`*}2cT@7V9EA;gYRFLOY4=(SYLh*XYJ~UU)`}KyI+W3`Z!<5 zR_6`gRzTT{B}{d8FH9;DATm=7C_jHZ?JZ;L zxt_pe!CLRyei!h$8OzW&3L=(UV1nbroL;c&xo&c2Qx2w@ag1-vx&Qpz{}ilA@efXP z|ME zzY2iCDkh$iLXR4&KUasCU#W6( zJ?_b&rj%pAd9g^v>dU3ZbQVu(J8__puhy`*=;)*R^2L_Kd1+#3+>A)d( z=jnBuNhyo&X-26xv;F1vnAUIc~#XGx(YLav~Ln9O6=Xd}HoUXqGH3#I!*TL`ACDgTD^%&8l^@}nKi$qiBBy}Yvp z$)7zr@H91NuQBd9s|D{c<$~cAv6qJT&P%;|yqf>m@m`UTAou<%?e#D<l6_>e=GsJIzeV}NnvRqL5_Qt41oA(=CrXomXl5Vn+ZD+ z`s*Tmv+YEZKMJh3VYj37Is88E7#Y7z-_P~+8Z_l3k!p}&X|s^tMS{;u{4b!%ST3^4Y(Z>8s2v z#_sU6c>2ABYPBixGNn(F8?hOFk7r|YDg|weA_a&~=lX+~v*}FY zr!Ss}lWEjLVqyw$)@GZAOPBt6qt2(KT~9IH-TX?GCP*^muQMAL_&SYoT)>=$k5$m$ zPy;(8hgz0a$WtnYn0h9LtByNz`)2RFj;v*GV?VWDbshg2YX_c4v@*EnG_r`pgtA1@ ze5;LI;w!mR)&w{rUh+&2X|c?pY{2e;Zl;vJd^ILKmFylggY%vo2d4+~ZAbeOLb?*5 zs;g1X5yxnwWzpg-@fz?vHWxk7D{X~F*5c>aiSY)+X_Md8z^#&^lnAcajS)I$H+j;Z z<1RDs$wgTmd(KUdXLlNfy-x$+)p%r$tP_LMiwp}nEe5)o;{x)O|TjA24iKLd@cUAMY?JP>iBy};MZcP6X`W& zEh%*jfZ6eT-7cWn=_We<*kfo6FSi+{fN+y4+8j#X80m{V>ic&IC;dZO{b}T5jI(c3W zlO_l@FzF2fF6CDQkiSeB76CzGe2H{-Xid&+h*=if8y)bNaB4oM?FI;%&rUg+8( 
zSxuP>^0G_@deDoPGNwob7;XG7(KK0NxWOi2A3TtUS^qVH|+m+zFQ`~Q&xO;ju6 zOnZJ*F&{cL!dWDPqY~cap+CMQy`4%1!Guv9o<%)Au(0GQwhq1vuWH;142iQkJ8hkw4S;aTja^9+3RV!HT3{@^0}_d?F1YZ5UTt` z_4R+m>M0;O2{c$br!4;koxffd5orHk0)@CdQhgQERjHjS&UaX1c9v^2;chg9TB!JK z$~D#Z5;)Zi*BYsd>FSO%{y^PbY?NsE#wSDJ3K^4}#S-X6Dct~O*L``>Dagmgb520i z$?KLeyIQ$4fqbLIaag@by2vf(^!;kq>nQxgxFMo7Nq(oXi~^~(0nCfs2UHM29aYTw zUn;zQ?VO0pl9gkgtl(4vm5hp|4@kJ+4i?|izzaFiOguc10uzS^xStaIsmXwekx`0e z4PGPTdO+qD-d|x0XZ!#Q{#|H0Ba98SO=C0ctzeI#*?3vdbiMq&00M3FG4*APgq`h) z=ZLv~=EMTtFA2*R$Cx@(m<{X4MnZs}SzUg2y^cOanhJ1c%K2oH{cXO-Y&u z0*;ihR#s@hXMo@LQ~2DaLW{um&Bb6;?vP=R`GP{0me`4oLozd!u+q|aKan|163gA0 zr<6fnL2&_u)K75OFjSg!5sHNViA&Cowm|edoN5K=+99cprKsVmuvibHQQ047wZ)K> zl3Vsu(*5yA%1r`ZGJH@A@5Dew%ZtGKBQ@2Nt4rH@-Au6IrYb+rh21>$GQ$BRNJCd! zdz{Lsd^ynM(j7K4-xgX_B~>{U0)?g_V}1-JAm0p?LfPXUGsxYG`$$XhC(Q=A*#AtD zUjB*PU~=aB*V|G;Xb^{p(ToSJ1D(_%$955mX5ojqsBjzRvDTb|AY3~HzN<(OPR6xi zJ=1jnkBI9LVsN)E_e!|&x?a9#h?7V&G~)$gSnHrx6uhLLM$m+)&)k5%I- z@tldMj}!9s3!hi}?KgW4yRCyenkiz9ENxW8?>aD~DsfdU(CaSD6J6sVTct7s`7;vo zc}ONNh$6i_DC#`SEptnXI8O<16m?mU`LQDsJ30-TKI?)k_9yTD<)_FJ_1Z^ar;aSh2Yn-BM?_o8A4l&3bS&PoX`tk&A{#^orbx z%d8ygcTkQ{y&pMzjl5akP1WVo3ZDE1|2Y0Yb36!mGt!}#@sfi6ZsN>LBA3Tl!3!dy zk2a$vjGWp)ekuvah#PGP?4pAhQ=_wi{-|JxOb|a?bYm;X?NFhdogDk_tv$vls35yU z@eRSK(R7)$@1!K!N*juFh*+LB3}Q#09XupH0HtEDiyHN8*tmAkd4 zE5Vmr4P93G%^<`j_rRfq4if1LE|(TZ_I=%e)4G9BYJR-Y*hP&g2mdM1@O2GV-EV|`Zl%`>rh?Fye@wFQ0R!i6>u#m~yM5hZNM+m^6AlFtMrkf+Fy6=zbc z$UMmMlZ+lUD~vr$4DBbf!gup@1j6(QPaCF20%b;mJYm34pu#dQoQ{|U)9+x9m^CGD zb@LAapP6zbv(Py)BH;0F0p0E)ureb0tO?sK*Y1SPm*lkmr1t34vPOMKg`o$et0L99 zrrODA*@_Ef)tHYpRtTksyAI%gpL%i?X$s9n!hkA+uFu`DnT`owz`kmk}MLDH8 zk)XCf(1chGeA0Yj|8T6I%qMJ_bE`bmr7&qH31((JN9j^mGCV}p@A61L&_-{M_1C^0 zudU%!$lL1$SFj0Ez=azb<$zqrChXZI+k|FU5U95k@wgBwQs8u(!|Lni5IF+9)ANWYe%1h(yGQj{JEm&`NAed`Qt75+%|%pkrw#Y9 zZ|i9VBLDI3Nm2k=NtN*j4=S!POECW2BtBcChW8`o+gALIz`MBVV$pkEZtv5w3aadk zvjN`e4~A5P-E@6xBi=YZSZZI{IULKb=x%;jL}Z#SNPqWQtt+ZIUQX50AcvRAxFf;ch_Rf{KRyxHvRM9Kj|tjp)#R)LbBahV6@9^4m>& zNQ=!-a-nlDq3lHNGa_QY^_ 
z6kbYA1+#GAppaG|VF?>^@i8O_3*xad)ojmK@|J5iY*au1Sl{y0Qa(ESKmXPzzpk7! zkP=_Z&{3HM(+&ADI^-qXnsNcZ=v3OuKh4_2=e4P|csdCHxt}Fk5+9c_pORS=uI8G* z4Wqdy8#<0E9OEB2;dRbtxrFVt1pFR{UEn9^uq){q!N$k@{$d_7x;=NhF3o0&Oj2MJ zWLrKQbU5rj6T|D&3(w8RFKMvd+P1M{G@jG%g_c5H4@?IpVT!vA#PUShPIewyjla;F zwXzHCP~9#AOLv@kge4i~cNqyDc{?pKUFS?#t{jii?~aO&HZS!?ZCDv$)W|Tawu}A7 zx{f;B^7%r33lh;%$-(Up$o+HV0>ww#L~bdj3+Y{bQ@*tMlF52MKO9wM=s#~2?fG?O zhyb*Y+sFr(35@x$vMFbTb=oA3iS={$n+X7Q704nAFDLMVrZNw^MtX9aMrvc4$#Bm^ zINf-rBp7x%#Pt;Q8%na_i%`>IiNg^1tR=+tT`kwH@Io)M{VZvs-4xm^0ND)ML|WGI z$XygP{PhYFOx$EnGCS>{byIXD^XL~XRbD-WiSQp5qxj`)}SWTE&=qr8E0Za}kq z(C(MnE;wb6pq@&Tu!^zE>@`{|D(DjcSC)_U2uKS&dC-M};xq-81P!#%kSrEBAltU{ z2PzJ;C3^|PUI!(^`Cao5zg5v+>sxe0Ydv#ytA8m?Y_(Zu&l~s(03EQBYId^TXW{&5 zRaG91vwV(p5AL^W`!mN>Tz%-EZ`@02@2# zW=@h9UJSMB=0HhaJ?}1>RD;4cR3CNDv(Ng%C3!qDqK(p&c?ek+R5di&4g8gr=({M> zK(?VL9&8BATTk56O;Ys5qh_FIe&#z@5-{;zC~1SCGCK{o6)081Nb*)C*5az?_$50F zQmdare-gtobbvpMgScrsxoVIto7&Edkko!;HUnL}oxr+=0Z9!9iG0=5Cp%D>!h*N& zpDmu5Yp8Sq?lF(dxL0Q&H6hS5A+T1&i9j$N^h-;7vOW{D*=|;^XIKk_5NA)hM=*)w zum)ypC3dv3>xF(F8~x*L7DkU$VO|Vk)%*rGWd8;)`j6#$z{4=Ptg#6uU<^W7Aq@i3 z9IUK2O&K>S4+H{#iN1LtXv{b87XB~8cH=tKa;`Fwz>i{U@PksXr%#ql9_S8$2XaP^&6nU zGq4)nt00tIIi$udqETJRB&_!`OsnjGGFs=TLPBx zp7oaa0SNM7fV{Lu5eKsY0~Auy647PxYZIoRdN-MT1Lz`HweAT*0|Q7V_Jem5>Ci~S z`HPF4ewZf)EGK(Jh(OFD7>vWM44n^Ao}AcxR2aLAK7=GBo0k-@4N=n;)@f^q!x)iV#JW`wI12sFnhDrjt)@faAkphu-uyP{b;Hn^~iGEli6E3LZJqsH%URAG< zW7mCG34Z~fN#tjy*+a9DTE`KSI-akOug(T_KD50?M%VkKi1xr=L>8!DGIFX)WVrbI zcNZ!ti3*7^a`N`73X)kGRory?d}SRY6rroV;*n=&L1Tu}?`1}YL0zUSW|yd^S!tfX zdnQEOKP;E6>RpcXK!+yn&8dP|Ay8?`kME2*|IrAf!Lj~WH(=gPV+`l4s7X_uCbnAI z)~&iDI+j$Ajpoi(}fzNEK5M*&Z|d^Y-#afxb{Hdo6SODGYTXjoKM>dIrU zbY4uTdPq-_Sh`WqRGJ738E@L+E0qK!z<(yJ^3}#K?B^Az0#BC*37;JD*k~8paK+E% z0iOpmti_Hd>4=9zxnk)|w13Vl{b{t!`ZS8#Ch#%FIgGMPn?6P=K-Ea=u~X>soa6%e z7YXZc;v4Dzvr_@Io$8V^B7`hdY${MyT6;zzN+rj-)TmGIhNFEdFk znYG?Yx^i2ZTfFT;zwfO#@U7av$s zd|z^SN6Ry~@KihhhY1lDpIiFiM3`>RlulCD?9&Zf2QNZ>HP%RSf!!mVZj2Xk?O}NK 
z&;%t$75!W1Ue8y(CwNYCF_|6CpBjZZBm}V6ax4^v$1}_x6O0hWxjv1uNX)G}>OHS) zLa80kV)Sm}DG|OWP!l);M!NXKlz(>aPm!VGgmL%j{ES=X7ny)by4obCJc66a% zq${N@2(=3$c)uL;rQ3)KfW~rL3;I|IL^c(@1?HZE|xq07AzZ*f4_TI%J5A&4|U<_`Fmo!iA|CwA|Y z3cG)j3xQ(3`scX+^5E|P9?0s_M^(&YS^biR17|yPi#K&^_dX7H&5e;EsL6N@s8dCn zx$%F5lW9T(3&GJEl7hfx)r-sE3jtzB(A`5z1A`9KQaixnT8DR^?Y8Vz&$5AQ%>tIO z5oy?ld?=PZVZU+MsAWUN+k7L`8|*G4mGd$xU(Z-j`}%^Vv*OxB93@Y;602i&)edYI z&hKA!%Ny(&Bo{|LQ2)&7E5xHT`4GFGYH@TCG@ z|NQ(QC>^(r;$@k+{A-vSk<_+-m!}{@R_zh0&1>w_o|1mIMOvVmwWHB3@tg1{P$@Km zuH!kfN60oRonEtPfyJw3cpa}mW?hoIl$P}jhK<1D)F7b`AdCH>oZvg``RK`d;-F3E z%@;}mVj^+mKu5ST@vm~nI}mre6#+32OF33&?OiA2GI=!NM&arhfLDC$!51Dd$=V>~ z#E&bdM1F6Hz*>hCs}hRw_w(Z9Km$KZ9+6#b$aY*O=C~LO?-f&fS$a5Q85_pzaJz4< z7ObdG+^*<{Om=fv47nibzPYjid^~s1T^2ppbfkt^+go{b7je)Eske8s>eT(e7TMJKg?de*;t9WG6ELQJ*^Ma{pRy3KseMU5P5AK3s{NuJ~b{+S=uh*ETwZ>K_anD+TIP6zjzFnX|AHK>nNuL z>2%pxa%cF9uE;0T(1L1BxvY5}u!79H`ZYn0m&CXKSO%mM(d`Or@vBb37y=H9`DOBZ z5Hi?y(VPscV&PS6vwq+2KjsGxV7bvUBR3P0v+*)e+}cRx=YHC)a88pDM$Uj<$$EK~ z@J#mjcjN(8(VjkxYknQZKs{^`UN&pTX&!~Yzu$56-^eMZU+q{nlmI~JGw+SrxSpYl zQ)*&-7f1?}gOJR5kh_tZ#)^?l#;W_8Ktd9Yu;m|FM1}tW?M64qw<7{RGzYj9xIsPs0oZnI zHz@b3*%lD_3O5W&I~e-NzlzpRPjH__BGZbY5lJ?nU}a|II?P-`T%vNIm+<}7xV5l{ zlfPNJQ#&SkyZvAEqTP$^riYtq`})W*!4T%m;%&;zP1XsS0(Dzw?jj{AeKRXf|7Gmf zGuDY1YDy5@)lO3-Ol`!J>9z6dPn#%(8`gyxPbJU)ld5p_;>Fnny$LN2zRfIZkPW-F zz%Zpo4bZbA?hIf7CX1@9!7iwen)tu+W}68eKl3c2L^nq1Lc1IB_6Y9S3OGhr*I?)r} z0b-ePslUk(bh540#F~U2P_({vw(NBK>L3mvCe5kiW7)C>kc6bHJWzy&)`Qzs{pYR? 
z;`Sr}<&{5_@@->P&=UP_YF268*%T!livEEF=pQE<-s3+`1dwrx5{CiY-ngzb*|6)A zdTmEU@;DqswKjV_9_+4zvWgPt!~IMja8$<8ymS}(lpfVBeg3~wiNBZi?m97kcPJ>d zT_)99$Tch#k3quE>gE(;!yH60!3zFq4_I37y*sV=w{Vc9GFKkEV>(7O9;Yd8v3Ve7 zGk6$kdV6$0CNCB*Vs;JUiv3-2>#oYQ$AriTxU;EbhZdRo?L9=`7$}$c)+}PeW$rqA z(3xAioo0Tlm0J9qd^VPv`F0l2aE&uC}K=?E;UwvB%N zXuvgc?}qx_a&om5>550%&UaL~75e8(?#?D7CP0XGvY(YSuqjB_aDZ>aoJtbSI`By` z*@^&r!7xRPNt@&4=Dt9E=OnEVNhijI36hw%5H zrp-rTgjXUBnj6=--a*YT&*t%94avYoBF$9#MV=gZO^Y;k{VLM5Qs=cAI`G#P>Ml9W zB38E|qhRaDBTT1M$CZBkB=WDg8;N)%&%7{(e(+TPBJExW5$<{o17LYOYB*B1cG}FT zVPpwj4J3>}A+Ljvq52*PoDU-ll|#m39BWF%42hDin3!`>_(@S18Ld{-DU#%h(g{Xp z6w)ohaxS`)rRdR7tqT0@A2h<1<8k5NKf7H&pif*g8a5j*4FvlLr(7f9cZ_P90QVYE zz(4)Gv9%KDE%lBzIhW3xTpARWsrq@SjE8PV=#5eo(@PEM<*1@wB?m=uPRVqV`?YuZ z4Rk)ZF3hkY2D6%J<1RGt&akg`X>0V>Cw3r8sp63I5pfMlBpU$k9G#VfXf|M*e`{>Id{1qi- zb5bV4-=>pJF+DobtE8v==eRS-rjU4M1NA*nQe!DN0W0GjGjBrzzp8NyucP!6Me0D+ z4#?;Z*Hnmou8&A#GWL796SrH3;IQn!wH7K-VZ<=x;VU}b#}7*+$<2Vn&1Y2E;_ z3$*?mLOcmtL5c^pqJ%tkzbVD!_aC|y$^*$!^fr2#sao`-xVgieFZRV(qtq#{&Wy2E z&Q^DIyd4Rfe>RR)*U&DOn z3L9zBOT(sBsYv)OKh@&ZH~0En(3z_ZYYTNB}MGlJzYcEGnpR>D8GQftxH$EHKKp! 
zJlB!Na~%eul9}xBv^1AGMl)kbe5q`+`lMCAQrE&VS{>58v=8zQw{{c=Lth^!e`?A) zjYYj>qC~=6mVsibRNYp4(wO^w(yhn(g7md)|G5>vO7@|=T~ae@>`EvNlqY8P`O^gJ z@sBxtUS>yCYkx-+rS?f7C$YvQFN`Smg~O&`V z4UTt8ho)jcvPGd(nz`I4C{NRmoUhY99$)Pe8t#D4a2T^sIzbVoa(x`0M``e z$@BaHEIjibI5*xvHY8{2aQBX1;ULEBHBRhb%(}?MQjUXCMCGSig0aY+Klx5s5r;@+Qs$K{h~%|^5mFm z?%|b1K7C^rxjz4Zk-q!SHB-o4=;^&{~`N3=wX ztYZ;c1ci0+Lu~4UY*f9f23d)wU>`Hv|ISsHSe2$01oFJ`*9@uZWf0iMQI#9da0gLL zmtJO4L1h{qvqw9JTGB+J?>pS12waS~K9RhIOr-Oa%!}S2wDwY;1Z#Zem4mAk_UOem zHLO*hMuyhS|F^b8^`F|(;!n;0LeV$+mUB7533(2iiQ3J5P0G9TA51Iax0HCw&c;^0 zI>ly#r^E$~t60p*6FU|DZxFUq+^{mw@vvQPg>D892_8Y~uFk!blWPQ(Cc^oh$ zSTXHA|7_N>@XPDFw};MjEL^ANkyP|Esc)RoM2qSA9*3ScaB}eX>xZ9pj3%j>gi{u5 z5N450!js`=SMp!3hp-3%dGu=s58dX@Y!XR|Z*Dxs_sH>3b2KuurHp*8q76_CkaY79 z^l0$68T!KRF#|fwQBDZ6AbzWHy14Y3jMSfLrYXL8Zhy{li(wJth8uoyE{zWHtZGmZ zhq%s>y~mDoobGe^j+6gO&cbZlZ98c}!xBs3t`Y+suCmk(uJpo9(}81`ek!XQ#t?Dq zQ<~q4m^M~DkDklV{hqDE*4hI`Ga7R%=*?K>>9u?LLh~h*G~n|tV$1dGx(sOS;K)($ z8cprQRHd$OHR5}t;WJg*HHT_fRXoGL-C8$WOEwLZvUd3%$Gft##RpcW^2H5|kp{MTXUvERrX)>e(q zP#z{k;y=tWF8B1TDN8D4M+NaC3shg1lwq?&5P?66RCkviMjf`CeMjb4hRVRRC^}(A zl7V8uf7Zs8))lU@CX+)9k0+N#+>thx7NF`|7B+@YHt=430o3@Ssh_t32!?>~Uxd?U z0Y&_I?SW$XG71UeV*SRo2KWU}oho8~Qs5#H#nvNtPf^`EAodYl-+X-9I?Q_aMlWvJb*N6JJ(#wYia zKzWm)_}@Kl_^I*#IGfY$c8)RfWXhTDRY}iN1H&Fy0{zhk3E*uUc)E!mJx3ulKpowu z)zq_o+cgoD`C&a#;cmOy?F&#JA%lj$XO)tEIb9450X9>MQbqvu$Sn=gbE$t*CS`Pu zjLQ;EN9h_F!bUy2PSac*OO&EKLkK%xjq4*b%KGNnfbcF&2nVm{Lke9AqTJ(}yg~CP z0_-!-(O9eXvXLQJHc*WX3;ZrQB^cSW#p7zNwWnh-L`kwK$4PcJZDvJ7#qB~FRfE_> zDYDDMiC3!uf7ZR|nU2=I%$XLj9CltqwRppP_McXteYE6_4YZ}fGfHpXYTel7QpYVe zX_XLueK2)qelssx-SFx%%6cB@5+%cX(J#5l6pk4hSt(03h<~L|G&ctqt9{A(R1-4s= zzUY2)=1hXPLD%1Wn=Rl#H$-i|-cFv5H#&WlZ8^{I=Xn_{U1R)Ls@JT<-yxa#R7X@7JACdo$CtM|Kc2ppe!2N_aq;k3_(4>~ z01ou^FM}*T*?&n)*RK0*t`)}Y>Tj{XM~2-HFY&+k#NZdm8{ZpZ7@WN&bFBL5xaAP| zvMOim2U1-Ko6(?kW7*L=r5L3lN(=&h)3vjJ$;TPdcy~kjllX^ZQC6wO_GILePQnk2 zTi!NAu`1(Jt5m8OmwanS2|>eoEj$ocmS62kI`ZOVQtgp*6I;)p*6J;LKs($8Y 
zWyz|4Cby-DV(@5vWjta$Jvd453esdvv_W~+k$n9-S;$&}htFGIJJi}O5+@8G>0AC; zn*+vUVhr{#MFN-VqMiz44t!^b_-g#N=wY=}RvgXhL~N<80ot5BQq$1!TsAuvp``{U zhxu%Z2l9)@o2I_X00h~8jBT_nYxE-Vm zVmkse&WvKkcIX@Yc9gG~&J{+05*VYBk!m|PZFyugU3zm(#I=$z%L=_1IvlEj?jiry zOCe}tPi+9{PV7Zw8TOqR_`N1@g@e$<@pR!6JK7LmzNF;(^&?cGuba)vi-ba$ z!7U#h|AIuLS-Uos#~ChdY;|uSs^GU(g)YdP4T5(2{Ar&6-j-N@D%p(_;R-aQ;Gt%t zIr!pl5CgsW9yz2P4dU+;pD}Y-ZX%4KrLFM@GHQ^ab8nwDi=hx1jOzI65bc4!^3Gw> zr>&`t@tO7qJX~}@kn4ReRE$KDoyzHJGZW6PuTEscAV>1v6=cvpOHTe<+9jJ-{RCu$ zTeq;ua3#h}&c7|lMz!m)kS9EGJ#Qp^rkJC#!Rz&r9=C7`%Lwz7aq%VoDF2AB(%ZH| zRM!Z{F^`OiW5fF^6AG+G*bCAvh^S` zB5&eQ1$VV)FR!`;xmPy7B#(Fe2);iSSQ<^+ZX;B7l{W(j-x>3^#%-ASuW^D*vpf1* zCzG>}VXb*LccPn@BH=)&ua{Q?m-TcFNI@j|=n&OHj~%V3C1Y1Blr$D+vG~|$A#b%Y zbEIiNV4|8x6IT z`hv%*Z+xUJ$4Xo;$L7{WQGbU?eIb6D`|k`8F&R4_Kr11r_9ughIAc{FTM8eVm19P; zLa!!+H*wabOM$SI54Tai;9}lFt9{wH$xxNa;C7s1guoG=hC415aP^)#9FIw3ia-Zv z$KNjmS}9~Y72Tp!DTC==dCaHONo@^uf{?G?i{~oE3cqYl>}@mZKv|cQ07K$rvoLeC zYA?=$zA^V<`G-F{g=UzYo2MsRW$2%e4+0{TFRtdvUa117Z?*TsKq%Ld%&_O8FW(~L z!v?wa%uxanEKWnP1?H%-h#{?cE?WO4N55UyWmarz;WCmF!|5=yTt-M1YK6?EWnn}| zcp;+7qSJWPYxu85GzPM<^gV9*w%;x$ z#JqCi0k=T$2ac^A?1faRka05oPm>771|;g@TgfF(|7^1Hns*Yc|2FlQ2n*MJS|857 zy*68)yRk6(@j}5l12zA(Wc@H%#ks7UkqK~dWNEh7dfjcb(PFs9qZXYM}+G|LBS+`2qZjkz2 z)Jy)NIC-T*PB Date: Thu, 5 Sep 2024 23:46:37 +0000 Subject: [PATCH 07/59] chore: release main (#13065) :robot: I have created a release *beep* *boop* ---
google-apps-chat: 0.1.10 ## [0.1.10](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.9...google-apps-chat-v0.1.10) (2024-09-05) ### Features * [google-apps-chat] Add CHAT_SPACE link type support for GA launch ([#13064](https://github.com/googleapis/google-cloud-python/issues/13064)) ([0ee300a](https://github.com/googleapis/google-cloud-python/commit/0ee300a0497968aa2c85969924b37f95f67675f0))
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- packages/google-apps-chat/CHANGELOG.md | 7 +++++++ .../google-apps-chat/google/apps/chat/gapic_version.py | 2 +- .../google-apps-chat/google/apps/chat_v1/gapic_version.py | 2 +- .../generated_samples/snippet_metadata_google.chat.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 477ae9480c55..d64187a095df 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -5,7 +5,7 @@ "packages/google-analytics-admin": "0.23.0", "packages/google-analytics-data": "0.18.11", "packages/google-apps-card": "0.1.4", - "packages/google-apps-chat": "0.1.9", + "packages/google-apps-chat": "0.1.10", "packages/google-apps-events-subscriptions": "0.1.2", "packages/google-apps-meet": "0.1.8", "packages/google-apps-script-type": "0.3.10", diff --git a/packages/google-apps-chat/CHANGELOG.md b/packages/google-apps-chat/CHANGELOG.md index ec2485ff2775..2ed2b6a49d98 100644 --- a/packages/google-apps-chat/CHANGELOG.md +++ b/packages/google-apps-chat/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.10](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.9...google-apps-chat-v0.1.10) (2024-09-05) + + +### Features + +* [google-apps-chat] Add CHAT_SPACE link type support for GA launch ([#13064](https://github.com/googleapis/google-cloud-python/issues/13064)) ([0ee300a](https://github.com/googleapis/google-cloud-python/commit/0ee300a0497968aa2c85969924b37f95f67675f0)) + ## [0.1.9](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.8...google-apps-chat-v0.1.9) (2024-07-30) diff --git 
a/packages/google-apps-chat/google/apps/chat/gapic_version.py b/packages/google-apps-chat/google/apps/chat/gapic_version.py index 558c8aab67c5..9413c3341313 100644 --- a/packages/google-apps-chat/google/apps/chat/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.10" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py index 558c8aab67c5..9413c3341313 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.10" # {x-release-please-version} diff --git a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json index 6e637d46d014..4442b6c5505a 100644 --- a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json +++ b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-chat", - "version": "0.1.0" + "version": "0.1.10" }, "snippets": [ { From a7f0dbfb8a5f3518cf1acec86b7bc3a6151a811b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 6 Sep 2024 15:45:13 -0400 Subject: [PATCH 08/59] docs: [google-maps-fleetengine-delivery] update comment link for ListTasks filter (#13066) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 671458761 Source-Link: https://github.com/googleapis/googleapis/commit/d3029316f8793ac5178dfbd1ebd366b80e32dd6c Source-Link: https://github.com/googleapis/googleapis-gen/commit/fe4884a9d2013647c34ddcd4f8df1d1d88c9eed4 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcHMtZmxlZXRlbmdpbmUtZGVsaXZlcnkvLk93bEJvdC55YW1sIiwiaCI6ImZlNDg4NGE5ZDIwMTM2NDdjMzRkZGNkNGY4ZGYxZDFkODhjOWVlZDQifQ== --------- Co-authored-by: Owl Bot --- .../google/maps/fleetengine_delivery_v1/types/delivery_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/types/delivery_api.py b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/types/delivery_api.py index a8e773aae506..cd619910ac96 100644 --- a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/types/delivery_api.py +++ b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/types/delivery_api.py @@ -509,7 +509,7 @@ class ListTasksRequest(proto.Message): don't specify a value, or if you filter on an empty string, then all Tasks are returned. For information about the Task properties that you can filter on, see `List - tasks `__. + tasks `__. """ header: mfd_header.DeliveryRequestHeader = proto.Field( From 1a2b325c0da966131072673e06d17015b16c7a1a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 6 Sep 2024 19:17:09 -0400 Subject: [PATCH 09/59] feat: [google-cloud-documentai] Add API fields for the descriptions of entity type and property in the document schema (#13067) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 671472365 Source-Link: https://github.com/googleapis/googleapis/commit/003e62665190becd32d722a82ed3cab62696225f Source-Link: https://github.com/googleapis/googleapis-gen/commit/bf0196fe2004e1fbb1edf5aa8d8ada653e10d62c Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRvY3VtZW50YWkvLk93bEJvdC55YW1sIiwiaCI6ImJmMDE5NmZlMjAwNGUxZmJiMWVkZjVhYThkOGFkYTY1M2UxMGQ2MmMifQ== --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../documentai_v1beta3/types/document_schema.py | 16 ++++++++++++++++ .../documentai_v1beta3/types/document_service.py | 4 +--- .../client-post-processing/doc-formatting.yaml | 1 + .../documentai_v1beta3/test_document_service.py | 2 ++ .../client-post-processing/doc-formatting.yaml | 15 +++++++++++++-- 5 files changed, 33 insertions(+), 5 deletions(-) create mode 120000 packages/google-cloud-documentai/scripts/client-post-processing/doc-formatting.yaml diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_schema.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_schema.py index 60d49d8c76b1..1c37739aca48 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_schema.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_schema.py @@ -189,6 +189,10 @@ class EntityType(proto.Message): type. For example ``line_item/amount``. This convention is deprecated, but will still be honored for backward compatibility. + description (str): + The description of the entity type. Could be + used to provide more information about the + entity type for model calls. base_types (MutableSequence[str]): The entity type that this type is derived from. For now, one and only one should be set. @@ -220,6 +224,10 @@ class Property(proto.Message): name (str): The name of the property. Follows the same guidelines as the EntityType name. + description (str): + The description of the property. 
Could be + used to provide more information about the + property for model calls. display_name (str): User defined name for the property. value_type (str): @@ -274,6 +282,10 @@ class OccurrenceType(proto.Enum): proto.STRING, number=1, ) + description: str = proto.Field( + proto.STRING, + number=7, + ) display_name: str = proto.Field( proto.STRING, number=6, @@ -309,6 +321,10 @@ class OccurrenceType(proto.Enum): proto.STRING, number=1, ) + description: str = proto.Field( + proto.STRING, + number=15, + ) base_types: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_service.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_service.py index 5fd1139ddcfb..86af095d60ab 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_service.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_service.py @@ -396,10 +396,8 @@ class ListDocumentsRequest(proto.Message): https://google.aip.dev/160. 
Currently support query strings are: - ------------------------------------ - - ``SplitType=DATASET_SPLIT_TEST|DATASET_SPLIT_TRAIN|DATASET_SPLIT_UNASSIGNED`` + - ``SplitType=DATASET_SPLIT_TEST|DATASET_SPLIT_TRAIN|DATASET_SPLIT_UNASSIGNED`` - ``LabelingState=DOCUMENT_LABELED|DOCUMENT_UNLABELED|DOCUMENT_AUTO_LABELED`` - ``DisplayName=\"file_name.pdf\"`` - ``EntityType=abc/def`` diff --git a/packages/google-cloud-documentai/scripts/client-post-processing/doc-formatting.yaml b/packages/google-cloud-documentai/scripts/client-post-processing/doc-formatting.yaml new file mode 120000 index 000000000000..6e0991666f97 --- /dev/null +++ b/packages/google-cloud-documentai/scripts/client-post-processing/doc-formatting.yaml @@ -0,0 +1 @@ +../../../../scripts/client-post-processing/doc-formatting.yaml \ No newline at end of file diff --git a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py index 7233ce2c91c1..a8e8294d811e 100644 --- a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py +++ b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py @@ -6117,10 +6117,12 @@ def test_update_dataset_schema_rest(request_type): "enum_values": {"values": ["values_value1", "values_value2"]}, "display_name": "display_name_value", "name": "name_value", + "description": "description_value", "base_types": ["base_types_value1", "base_types_value2"], "properties": [ { "name": "name_value", + "description": "description_value", "display_name": "display_name_value", "value_type": "value_type_value", "occurrence_type": 1, diff --git a/scripts/client-post-processing/doc-formatting.yaml b/scripts/client-post-processing/doc-formatting.yaml index d95e1c4fc542..88dd09382f64 100644 --- a/scripts/client-post-processing/doc-formatting.yaml +++ 
b/scripts/client-post-processing/doc-formatting.yaml @@ -161,9 +161,20 @@ replacements: after: " 'ingestionTime': DOUBLE; (UNIX timestamp)\n 'application': STRING;\n" count: 2 - paths: [ - packages/google-cloud-visionai/google/cloud/visionai_v1/types/platform.py, + packages/google-cloud-visionai/google/cloud/visionai_v1/types/platform.py, ] before: "'processor': STRING;\n }\n dynamic_config_input_topic " after: "'processor': STRING;\n\n }\n\n dynamic_config_input_topic " count: 1 - + - paths: [ + packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_service.py, + ] + before: | + \n Currently support query strings are: + \ ------------------------------------ + \ + \ ``SplitType=DATASET_SPLIT_TEST\|DATASET_SPLIT_TRAIN\|DATASET_SPLIT_UNASSIGNED`` + \ + \ - ``LabelingState=DOCUMENT_LABELED\|DOCUMENT_UNLABELED\|DOCUMENT_AUTO_LABELED`` + after: "\n Currently support query strings are:\n\n - ``SplitType=DATASET_SPLIT_TEST|DATASET_SPLIT_TRAIN|DATASET_SPLIT_UNASSIGNED``\n - ``LabelingState=DOCUMENT_LABELED|DOCUMENT_UNLABELED|DOCUMENT_AUTO_LABELED``\n" + count: 1 From 31b38106f13113f205d6e25ac802754d611979da Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 9 Sep 2024 07:53:33 -0400 Subject: [PATCH 10/59] chore: release main (#13069) :robot: I have created a release *beep* *boop* ---
google-cloud-documentai: 2.32.0 ## [2.32.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-documentai-v2.31.0...google-cloud-documentai-v2.32.0) (2024-09-06) ### Features * [google-cloud-documentai] Add API fields for the descriptions of entity type and property in the document schema ([#13067](https://github.com/googleapis/google-cloud-python/issues/13067)) ([1a2b325](https://github.com/googleapis/google-cloud-python/commit/1a2b325c0da966131072673e06d17015b16c7a1a))
google-maps-fleetengine-delivery: 0.2.4 ## [0.2.4](https://github.com/googleapis/google-cloud-python/compare/google-maps-fleetengine-delivery-v0.2.3...google-maps-fleetengine-delivery-v0.2.4) (2024-09-06) ### Documentation * [google-maps-fleetengine-delivery] update comment link for ListTasks filter ([#13066](https://github.com/googleapis/google-cloud-python/issues/13066)) ([a7f0dbf](https://github.com/googleapis/google-cloud-python/commit/a7f0dbfb8a5f3518cf1acec86b7bc3a6151a811b))
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 4 ++-- packages/google-cloud-documentai/CHANGELOG.md | 7 +++++++ .../google/cloud/documentai/gapic_version.py | 2 +- .../google/cloud/documentai_v1/gapic_version.py | 2 +- .../google/cloud/documentai_v1beta2/gapic_version.py | 2 +- .../google/cloud/documentai_v1beta3/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.documentai.v1.json | 2 +- .../snippet_metadata_google.cloud.documentai.v1beta2.json | 2 +- .../snippet_metadata_google.cloud.documentai.v1beta3.json | 2 +- packages/google-maps-fleetengine-delivery/CHANGELOG.md | 7 +++++++ .../google/maps/fleetengine_delivery/gapic_version.py | 2 +- .../google/maps/fleetengine_delivery_v1/gapic_version.py | 2 +- .../snippet_metadata_maps.fleetengine.delivery.v1.json | 2 +- 13 files changed, 26 insertions(+), 12 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index d64187a095df..a23882e898d2 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -78,7 +78,7 @@ "packages/google-cloud-discoveryengine": "0.12.2", "packages/google-cloud-dlp": "3.22.0", "packages/google-cloud-dms": "1.9.5", - "packages/google-cloud-documentai": "2.31.0", + "packages/google-cloud-documentai": "2.32.0", "packages/google-cloud-domains": "1.7.5", "packages/google-cloud-edgecontainer": "0.5.11", "packages/google-cloud-edgenetwork": "0.1.11", @@ -179,7 +179,7 @@ "packages/google-geo-type": "0.3.9", "packages/google-maps-addressvalidation": "0.3.13", "packages/google-maps-fleetengine": "0.2.2", - "packages/google-maps-fleetengine-delivery": "0.2.3", + "packages/google-maps-fleetengine-delivery": "0.2.4", "packages/google-maps-mapsplatformdatasets": "0.4.2", 
"packages/google-maps-places": "0.1.17", "packages/google-maps-routeoptimization": "0.1.2", diff --git a/packages/google-cloud-documentai/CHANGELOG.md b/packages/google-cloud-documentai/CHANGELOG.md index 8b3db92af5a1..5a26f2674ae3 100644 --- a/packages/google-cloud-documentai/CHANGELOG.md +++ b/packages/google-cloud-documentai/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [2.32.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-documentai-v2.31.0...google-cloud-documentai-v2.32.0) (2024-09-06) + + +### Features + +* [google-cloud-documentai] Add API fields for the descriptions of entity type and property in the document schema ([#13067](https://github.com/googleapis/google-cloud-python/issues/13067)) ([1a2b325](https://github.com/googleapis/google-cloud-python/commit/1a2b325c0da966131072673e06d17015b16c7a1a)) + ## [2.31.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-documentai-v2.30.0...google-cloud-documentai-v2.31.0) (2024-08-08) diff --git a/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py index 558c8aab67c5..c82b1e137507 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py index 558c8aab67c5..c82b1e137507 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py index 558c8aab67c5..c82b1e137507 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py index 558c8aab67c5..c82b1e137507 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json index 96d60af285a2..2fc98b45f209 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "0.1.0" + "version": "2.32.0" }, "snippets": [ { diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json index ef56bd7e3eb3..31e4348ff0cb 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "0.1.0" + "version": "2.32.0" }, "snippets": [ { diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json index f47545a8ed3e..43bcd3c8902f 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - 
"version": "0.1.0" + "version": "2.32.0" }, "snippets": [ { diff --git a/packages/google-maps-fleetengine-delivery/CHANGELOG.md b/packages/google-maps-fleetengine-delivery/CHANGELOG.md index cf4766e90920..5c90dc3fdce5 100644 --- a/packages/google-maps-fleetengine-delivery/CHANGELOG.md +++ b/packages/google-maps-fleetengine-delivery/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.2.4](https://github.com/googleapis/google-cloud-python/compare/google-maps-fleetengine-delivery-v0.2.3...google-maps-fleetengine-delivery-v0.2.4) (2024-09-06) + + +### Documentation + +* [google-maps-fleetengine-delivery] update comment link for ListTasks filter ([#13066](https://github.com/googleapis/google-cloud-python/issues/13066)) ([a7f0dbf](https://github.com/googleapis/google-cloud-python/commit/a7f0dbfb8a5f3518cf1acec86b7bc3a6151a811b)) + ## [0.2.3](https://github.com/googleapis/google-cloud-python/compare/google-maps-fleetengine-delivery-v0.2.2...google-maps-fleetengine-delivery-v0.2.3) (2024-07-30) diff --git a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py index 558c8aab67c5..668eac0d72ce 100644 --- a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py +++ b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.4" # {x-release-please-version} diff --git a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py index 558c8aab67c5..668eac0d72ce 100644 --- a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py +++ b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.4" # {x-release-please-version} diff --git a/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json b/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json index 884ac226d0e4..685d1554024c 100644 --- a/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json +++ b/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-fleetengine-delivery", - "version": "0.1.0" + "version": "0.2.4" }, "snippets": [ { From ae583456fd0a2f2e46ce8759918ebe3fc19a0fe8 Mon Sep 17 00:00:00 2001 From: "owlbot-bootstrapper[bot]" <104649659+owlbot-bootstrapper[bot]@users.noreply.github.com> Date: Thu, 12 Sep 2024 13:46:09 -0700 Subject: [PATCH 11/59] feat: add initial files for google.maps.areainsights.v1 (#13078) Source-Link: https://github.com/googleapis/googleapis-gen/commit/802f7c8cdf887527e99fa9c0d774adfd33a16ffe Copy-Tag: 
eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcHMtYXJlYWluc2lnaHRzLy5Pd2xCb3QueWFtbCIsImgiOiI4MDJmN2M4Y2RmODg3NTI3ZTk5ZmE5YzBkNzc0YWRmZDMzYTE2ZmZlIn0= PiperOrigin-RevId: 672562643 --------- Co-authored-by: Owlbot Bootstrapper Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky --- .../google-maps-areainsights/.OwlBot.yaml | 18 + packages/google-maps-areainsights/.coveragerc | 13 + packages/google-maps-areainsights/.flake8 | 33 + packages/google-maps-areainsights/.gitignore | 63 + .../.repo-metadata.json | 17 + .../google-maps-areainsights/CHANGELOG.md | 1 + .../CODE_OF_CONDUCT.md | 95 + .../google-maps-areainsights/CONTRIBUTING.rst | 271 ++ packages/google-maps-areainsights/LICENSE | 202 ++ packages/google-maps-areainsights/MANIFEST.in | 25 + packages/google-maps-areainsights/README.rst | 108 + .../docs/CHANGELOG.md | 1 + .../google-maps-areainsights/docs/README.rst | 1 + .../docs/_static/custom.css | 20 + .../docs/_templates/layout.html | 50 + .../docs/areainsights_v1/area_insights.rst | 6 + .../docs/areainsights_v1/services_.rst | 6 + .../docs/areainsights_v1/types_.rst | 6 + .../google-maps-areainsights/docs/conf.py | 384 +++ .../google-maps-areainsights/docs/index.rst | 23 + .../docs/multiprocessing.rst | 7 + .../google/maps/areainsights/__init__.py | 51 + .../google/maps/areainsights/gapic_version.py | 17 + .../google/maps/areainsights/py.typed | 2 + .../google/maps/areainsights_v1/__init__.py | 48 + .../maps/areainsights_v1/gapic_metadata.json | 43 + .../maps/areainsights_v1/gapic_version.py | 17 + .../google/maps/areainsights_v1/py.typed | 2 + .../maps/areainsights_v1/services/__init__.py | 15 + .../services/area_insights/__init__.py | 22 + .../services/area_insights/async_client.py | 353 +++ .../services/area_insights/client.py | 773 ++++++ .../area_insights/transports/__init__.py | 36 + .../services/area_insights/transports/base.py | 173 ++ .../services/area_insights/transports/grpc.py | 286 ++ .../area_insights/transports/grpc_asyncio.py | 305 +++ 
.../services/area_insights/transports/rest.py | 313 +++ .../maps/areainsights_v1/types/__init__.py | 40 + .../types/area_insights_service.py | 510 ++++ packages/google-maps-areainsights/mypy.ini | 3 + packages/google-maps-areainsights/noxfile.py | 452 ++++ ...ed_area_insights_compute_insights_async.py | 52 + ...ted_area_insights_compute_insights_sync.py | 52 + ..._metadata_google.maps.areainsights.v1.json | 168 ++ .../scripts/decrypt-secrets.sh | 46 + .../scripts/fixup_areainsights_v1_keywords.py | 176 ++ packages/google-maps-areainsights/setup.py | 95 + .../testing/.gitignore | 3 + .../testing/constraints-3.10.txt | 6 + .../testing/constraints-3.11.txt | 6 + .../testing/constraints-3.12.txt | 6 + .../testing/constraints-3.7.txt | 10 + .../testing/constraints-3.8.txt | 6 + .../testing/constraints-3.9.txt | 6 + .../tests/__init__.py | 15 + .../tests/unit/__init__.py | 15 + .../tests/unit/gapic/__init__.py | 15 + .../unit/gapic/areainsights_v1/__init__.py | 15 + .../areainsights_v1/test_area_insights.py | 2315 +++++++++++++++++ release-please-config.json | 10 + 60 files changed, 7828 insertions(+) create mode 100644 packages/google-maps-areainsights/.OwlBot.yaml create mode 100644 packages/google-maps-areainsights/.coveragerc create mode 100644 packages/google-maps-areainsights/.flake8 create mode 100644 packages/google-maps-areainsights/.gitignore create mode 100644 packages/google-maps-areainsights/.repo-metadata.json create mode 100644 packages/google-maps-areainsights/CHANGELOG.md create mode 100644 packages/google-maps-areainsights/CODE_OF_CONDUCT.md create mode 100644 packages/google-maps-areainsights/CONTRIBUTING.rst create mode 100644 packages/google-maps-areainsights/LICENSE create mode 100644 packages/google-maps-areainsights/MANIFEST.in create mode 100644 packages/google-maps-areainsights/README.rst create mode 120000 packages/google-maps-areainsights/docs/CHANGELOG.md create mode 120000 packages/google-maps-areainsights/docs/README.rst create mode 100644 
packages/google-maps-areainsights/docs/_static/custom.css create mode 100644 packages/google-maps-areainsights/docs/_templates/layout.html create mode 100644 packages/google-maps-areainsights/docs/areainsights_v1/area_insights.rst create mode 100644 packages/google-maps-areainsights/docs/areainsights_v1/services_.rst create mode 100644 packages/google-maps-areainsights/docs/areainsights_v1/types_.rst create mode 100644 packages/google-maps-areainsights/docs/conf.py create mode 100644 packages/google-maps-areainsights/docs/index.rst create mode 100644 packages/google-maps-areainsights/docs/multiprocessing.rst create mode 100644 packages/google-maps-areainsights/google/maps/areainsights/__init__.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights/gapic_version.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights/py.typed create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/__init__.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_metadata.json create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_version.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/py.typed create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/services/__init__.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/__init__.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/async_client.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/client.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/__init__.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/base.py create mode 100644 
packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/grpc.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/grpc_asyncio.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/rest.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/types/__init__.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/types/area_insights_service.py create mode 100644 packages/google-maps-areainsights/mypy.ini create mode 100644 packages/google-maps-areainsights/noxfile.py create mode 100644 packages/google-maps-areainsights/samples/generated_samples/areainsights_v1_generated_area_insights_compute_insights_async.py create mode 100644 packages/google-maps-areainsights/samples/generated_samples/areainsights_v1_generated_area_insights_compute_insights_sync.py create mode 100644 packages/google-maps-areainsights/samples/generated_samples/snippet_metadata_google.maps.areainsights.v1.json create mode 100755 packages/google-maps-areainsights/scripts/decrypt-secrets.sh create mode 100644 packages/google-maps-areainsights/scripts/fixup_areainsights_v1_keywords.py create mode 100644 packages/google-maps-areainsights/setup.py create mode 100644 packages/google-maps-areainsights/testing/.gitignore create mode 100644 packages/google-maps-areainsights/testing/constraints-3.10.txt create mode 100644 packages/google-maps-areainsights/testing/constraints-3.11.txt create mode 100644 packages/google-maps-areainsights/testing/constraints-3.12.txt create mode 100644 packages/google-maps-areainsights/testing/constraints-3.7.txt create mode 100644 packages/google-maps-areainsights/testing/constraints-3.8.txt create mode 100644 packages/google-maps-areainsights/testing/constraints-3.9.txt create mode 100644 packages/google-maps-areainsights/tests/__init__.py create mode 
100644 packages/google-maps-areainsights/tests/unit/__init__.py create mode 100644 packages/google-maps-areainsights/tests/unit/gapic/__init__.py create mode 100644 packages/google-maps-areainsights/tests/unit/gapic/areainsights_v1/__init__.py create mode 100644 packages/google-maps-areainsights/tests/unit/gapic/areainsights_v1/test_area_insights.py diff --git a/packages/google-maps-areainsights/.OwlBot.yaml b/packages/google-maps-areainsights/.OwlBot.yaml new file mode 100644 index 000000000000..35589d0d6923 --- /dev/null +++ b/packages/google-maps-areainsights/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +deep-copy-regex: + - source: /google/maps/areainsights/(v.*)/.*-py + dest: /owl-bot-staging/google-maps-areainsights/$1 +api-name: google-maps-areainsights diff --git a/packages/google-maps-areainsights/.coveragerc b/packages/google-maps-areainsights/.coveragerc new file mode 100644 index 000000000000..612fd0382a68 --- /dev/null +++ b/packages/google-maps-areainsights/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/maps/areainsights/__init__.py + google/maps/areainsights/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-maps-areainsights/.flake8 b/packages/google-maps-areainsights/.flake8 new file mode 100644 index 000000000000..32986c79287a --- /dev/null +++ b/packages/google-maps-areainsights/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-maps-areainsights/.gitignore b/packages/google-maps-areainsights/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-maps-areainsights/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-maps-areainsights/.repo-metadata.json b/packages/google-maps-areainsights/.repo-metadata.json new file mode 100644 index 000000000000..2b94b1bde887 --- /dev/null +++ b/packages/google-maps-areainsights/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-maps-areainsights", + "name_pretty": "Places Insights API", + "api_description": "Places Insights API. 
", + "product_documentation": "https://developers.google.com/maps/documentation/places-insights", + "client_documentation": "https://googleapis.dev/python/google-maps-areainsights/latest", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1624013&template=2026178", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-maps-areainsights", + "api_id": "areainsights.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "areainsights" +} diff --git a/packages/google-maps-areainsights/CHANGELOG.md b/packages/google-maps-areainsights/CHANGELOG.md new file mode 100644 index 000000000000..5ddad421e08f --- /dev/null +++ b/packages/google-maps-areainsights/CHANGELOG.md @@ -0,0 +1 @@ +# Changelog \ No newline at end of file diff --git a/packages/google-maps-areainsights/CODE_OF_CONDUCT.md b/packages/google-maps-areainsights/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-maps-areainsights/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. 
In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-maps-areainsights/CONTRIBUTING.rst b/packages/google-maps-areainsights/CONTRIBUTING.rst new file mode 100644 index 000000000000..f47c4696d0d8 --- /dev/null +++ b/packages/google-maps-areainsights/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. 
+ +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. 
Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. 
Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. 
``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-maps-areainsights + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-maps-areainsights/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. 
diff --git a/packages/google-maps-areainsights/LICENSE b/packages/google-maps-areainsights/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-maps-areainsights/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-maps-areainsights/MANIFEST.in b/packages/google-maps-areainsights/MANIFEST.in new file mode 100644 index 000000000000..d6814cd60037 --- /dev/null +++ b/packages/google-maps-areainsights/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-maps-areainsights/README.rst b/packages/google-maps-areainsights/README.rst new file mode 100644 index 000000000000..4f7db1c05b22 --- /dev/null +++ b/packages/google-maps-areainsights/README.rst @@ -0,0 +1,108 @@ +Python Client for Places Insights API +===================================== + +|preview| |pypi| |versions| + +`Places Insights API`_: Places Insights API. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-maps-areainsights.svg + :target: https://pypi.org/project/google-maps-areainsights/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-maps-areainsights.svg + :target: https://pypi.org/project/google-maps-areainsights/ +.. _Places Insights API: https://developers.google.com/maps/documentation/places-insights +.. _Client Library Documentation: https://googleapis.dev/python/google-maps-areainsights/latest +.. _Product Documentation: https://developers.google.com/maps/documentation/places-insights + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Places Insights API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. 
_Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Places Insights API.: https://developers.google.com/maps/documentation/places-insights +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-areainsights/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-maps-areainsights + + +Windows +^^^^^^^ + +.. 
code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-maps-areainsights + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Places Insights API + to see other available methods on the client. +- Read the `Places Insights API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Places Insights API Product documentation: https://developers.google.com/maps/documentation/places-insights +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-maps-areainsights/docs/CHANGELOG.md b/packages/google-maps-areainsights/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-maps-areainsights/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-maps-areainsights/docs/README.rst b/packages/google-maps-areainsights/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-maps-areainsights/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-maps-areainsights/docs/_static/custom.css b/packages/google-maps-areainsights/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-maps-areainsights/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-maps-areainsights/docs/_templates/layout.html 
b/packages/google-maps-areainsights/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-maps-areainsights/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-maps-areainsights/docs/areainsights_v1/area_insights.rst b/packages/google-maps-areainsights/docs/areainsights_v1/area_insights.rst new file mode 100644 index 000000000000..78835e4cf335 --- /dev/null +++ b/packages/google-maps-areainsights/docs/areainsights_v1/area_insights.rst @@ -0,0 +1,6 @@ +AreaInsights +------------------------------ + +.. automodule:: google.maps.areainsights_v1.services.area_insights + :members: + :inherited-members: diff --git a/packages/google-maps-areainsights/docs/areainsights_v1/services_.rst b/packages/google-maps-areainsights/docs/areainsights_v1/services_.rst new file mode 100644 index 000000000000..54fbea792518 --- /dev/null +++ b/packages/google-maps-areainsights/docs/areainsights_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Maps Areainsights v1 API +============================================ +.. toctree:: + :maxdepth: 2 + + area_insights diff --git a/packages/google-maps-areainsights/docs/areainsights_v1/types_.rst b/packages/google-maps-areainsights/docs/areainsights_v1/types_.rst new file mode 100644 index 000000000000..4774a4b375c1 --- /dev/null +++ b/packages/google-maps-areainsights/docs/areainsights_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Maps Areainsights v1 API +========================================= + +.. automodule:: google.maps.areainsights_v1.types + :members: + :show-inheritance: diff --git a/packages/google-maps-areainsights/docs/conf.py b/packages/google-maps-areainsights/docs/conf.py new file mode 100644 index 000000000000..89eaf10bb31d --- /dev/null +++ b/packages/google-maps-areainsights/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-maps-areainsights documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-maps-areainsights" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-maps-areainsights", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". 
+# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
+# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-maps-areainsights-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-maps-areainsights.tex", + "google-maps-areainsights Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-maps-areainsights", + "google-maps-areainsights Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-maps-areainsights", + "google-maps-areainsights Documentation", + author, + "google-maps-areainsights", + "google-maps-areainsights Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. 
+# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-maps-areainsights/docs/index.rst b/packages/google-maps-areainsights/docs/index.rst new file mode 100644 index 000000000000..edf03f49e17f --- /dev/null +++ b/packages/google-maps-areainsights/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + areainsights_v1/services_ + areainsights_v1/types_ + + +Changelog +--------- + +For a list of all ``google-maps-areainsights`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-maps-areainsights/docs/multiprocessing.rst b/packages/google-maps-areainsights/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-maps-areainsights/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. 
note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-maps-areainsights/google/maps/areainsights/__init__.py b/packages/google-maps-areainsights/google/maps/areainsights/__init__.py new file mode 100644 index 000000000000..aeff01ed5cae --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights/__init__.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.maps.areainsights import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.maps.areainsights_v1.services.area_insights.async_client import ( + AreaInsightsAsyncClient, +) +from google.maps.areainsights_v1.services.area_insights.client import AreaInsightsClient +from google.maps.areainsights_v1.types.area_insights_service import ( + ComputeInsightsRequest, + ComputeInsightsResponse, + Filter, + Insight, + LocationFilter, + OperatingStatus, + PlaceInsight, + PriceLevel, + RatingFilter, + TypeFilter, +) + +__all__ = ( + "AreaInsightsClient", + "AreaInsightsAsyncClient", + "ComputeInsightsRequest", + "ComputeInsightsResponse", + "Filter", + "LocationFilter", + "PlaceInsight", + "RatingFilter", + "TypeFilter", + "Insight", + "OperatingStatus", + "PriceLevel", +) diff --git a/packages/google-maps-areainsights/google/maps/areainsights/gapic_version.py b/packages/google-maps-areainsights/google/maps/areainsights/gapic_version.py new file mode 100644 index 000000000000..caeec5b9e887 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-areainsights/google/maps/areainsights/py.typed b/packages/google-maps-areainsights/google/maps/areainsights/py.typed new file mode 100644 index 000000000000..b098dc9b9f40 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-maps-areainsights package uses inline types. diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/__init__.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/__init__.py new file mode 100644 index 000000000000..4d2ba8c829c8 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/__init__.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.maps.areainsights_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.area_insights import AreaInsightsAsyncClient, AreaInsightsClient +from .types.area_insights_service import ( + ComputeInsightsRequest, + ComputeInsightsResponse, + Filter, + Insight, + LocationFilter, + OperatingStatus, + PlaceInsight, + PriceLevel, + RatingFilter, + TypeFilter, +) + +__all__ = ( + "AreaInsightsAsyncClient", + "AreaInsightsClient", + "ComputeInsightsRequest", + "ComputeInsightsResponse", + "Filter", + "Insight", + "LocationFilter", + "OperatingStatus", + "PlaceInsight", + "PriceLevel", + "RatingFilter", + "TypeFilter", +) diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_metadata.json b/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_metadata.json new file mode 100644 index 000000000000..413146689761 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_metadata.json @@ -0,0 +1,43 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.maps.areainsights_v1", + "protoPackage": "google.maps.areainsights.v1", + "schema": "1.0", + "services": { + "AreaInsights": { + "clients": { + "grpc": { + "libraryClient": "AreaInsightsClient", + "rpcs": { + "ComputeInsights": { + "methods": [ + "compute_insights" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AreaInsightsAsyncClient", + "rpcs": { + "ComputeInsights": { + "methods": [ + "compute_insights" + ] + } + } + }, + "rest": { + "libraryClient": "AreaInsightsClient", + "rpcs": { + "ComputeInsights": { + "methods": [ + "compute_insights" + ] + } + } + } + } + } + } +} diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_version.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_version.py new file mode 100644 index 
000000000000..caeec5b9e887 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/py.typed b/packages/google-maps-areainsights/google/maps/areainsights_v1/py.typed new file mode 100644 index 000000000000..b098dc9b9f40 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-maps-areainsights package uses inline types. diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/services/__init__.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/__init__.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/__init__.py new file mode 100644 index 000000000000..8630700a35b9 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import AreaInsightsAsyncClient +from .client import AreaInsightsClient + +__all__ = ( + "AreaInsightsClient", + "AreaInsightsAsyncClient", +) diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/async_client.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/async_client.py new file mode 100644 index 000000000000..9a362d2be7e9 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/async_client.py @@ -0,0 +1,353 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.maps.areainsights_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.maps.areainsights_v1.types import area_insights_service + +from .client import AreaInsightsClient +from .transports.base import DEFAULT_CLIENT_INFO, AreaInsightsTransport +from .transports.grpc_asyncio import AreaInsightsGrpcAsyncIOTransport + + +class AreaInsightsAsyncClient: + """Service definition for the Places Insights API.""" + + _client: AreaInsightsClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = AreaInsightsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AreaInsightsClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = AreaInsightsClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = AreaInsightsClient._DEFAULT_UNIVERSE + + place_path = staticmethod(AreaInsightsClient.place_path) + parse_place_path = staticmethod(AreaInsightsClient.parse_place_path) + common_billing_account_path = staticmethod( + AreaInsightsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AreaInsightsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(AreaInsightsClient.common_folder_path) + parse_common_folder_path = staticmethod(AreaInsightsClient.parse_common_folder_path) + common_organization_path = staticmethod(AreaInsightsClient.common_organization_path) + parse_common_organization_path = staticmethod( + AreaInsightsClient.parse_common_organization_path + ) + common_project_path = staticmethod(AreaInsightsClient.common_project_path) + parse_common_project_path = staticmethod( + AreaInsightsClient.parse_common_project_path + ) + common_location_path = staticmethod(AreaInsightsClient.common_location_path) + parse_common_location_path = staticmethod( + AreaInsightsClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AreaInsightsAsyncClient: The constructed client. 
+ """ + return AreaInsightsClient.from_service_account_info.__func__(AreaInsightsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AreaInsightsAsyncClient: The constructed client. + """ + return AreaInsightsClient.from_service_account_file.__func__(AreaInsightsAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return AreaInsightsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> AreaInsightsTransport: + """Returns the transport used by the client instance. + + Returns: + AreaInsightsTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = AreaInsightsClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, AreaInsightsTransport, Callable[..., AreaInsightsTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the area insights async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,AreaInsightsTransport,Callable[..., AreaInsightsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. 
+ If a Callable is given, it will be called with the same set of initialization + arguments as used in the AreaInsightsTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = AreaInsightsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def compute_insights( + self, + request: Optional[ + Union[area_insights_service.ComputeInsightsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> area_insights_service.ComputeInsightsResponse: + r"""Compute Insights RPC + + This method lets you retrieve insights about areas using a + variaty of filter such as: area, place type, operating status, + price level and ratings. Currently "count" and "places" insights + are supported. With "count" insights you can answer questions + such as "How many restaurant are located in California that are + operational, are inexpensive and have an average rating of at + least 4 stars" (see ``insight`` enum for more details). With + "places" insights, you can determine which places match the + requested filter. Clients can then use those place resource + names to fetch more details about each individual place using + the Places API. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import areainsights_v1 + + async def sample_compute_insights(): + # Create a client + client = areainsights_v1.AreaInsightsAsyncClient() + + # Initialize request argument(s) + request = areainsights_v1.ComputeInsightsRequest( + insights=['INSIGHT_PLACES'], + ) + + # Make the request + response = await client.compute_insights(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.maps.areainsights_v1.types.ComputeInsightsRequest, dict]]): + The request object. Request for the ComputeInsights RPC. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.areainsights_v1.types.ComputeInsightsResponse: + Response for the ComputeInsights RPC. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, area_insights_service.ComputeInsightsRequest): + request = area_insights_service.ComputeInsightsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.compute_insights + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "AreaInsightsAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AreaInsightsAsyncClient",) diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/client.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/client.py new file mode 100644 index 000000000000..8fdceb253313 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/client.py @@ -0,0 +1,773 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.maps.areainsights_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.maps.areainsights_v1.types import area_insights_service + +from .transports.base import DEFAULT_CLIENT_INFO, AreaInsightsTransport +from .transports.grpc import AreaInsightsGrpcTransport +from .transports.grpc_asyncio import AreaInsightsGrpcAsyncIOTransport +from .transports.rest import AreaInsightsRestTransport + + +class AreaInsightsClientMeta(type): + """Metaclass for the AreaInsights client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[AreaInsightsTransport]] + _transport_registry["grpc"] = AreaInsightsGrpcTransport + _transport_registry["grpc_asyncio"] = AreaInsightsGrpcAsyncIOTransport + _transport_registry["rest"] = AreaInsightsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[AreaInsightsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AreaInsightsClient(metaclass=AreaInsightsClientMeta): + """Service definition for the Places Insights API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = "areainsights.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "areainsights.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AreaInsightsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AreaInsightsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AreaInsightsTransport: + """Returns the transport used by the client instance. + + Returns: + AreaInsightsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def place_path( + place_id: str, + ) -> str: + """Returns a fully-qualified place string.""" + return "places/{place_id}".format( + place_id=place_id, + ) + + @staticmethod + def parse_place_path(path: str) -> Dict[str, str]: + """Parses a place path into its component segments.""" + m = re.match(r"^places/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: 
+ """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. 
+ google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. 
+ use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = AreaInsightsClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = AreaInsightsClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = AreaInsightsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = AreaInsightsClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. 
+ + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = AreaInsightsClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or AreaInsightsClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. 
+ """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, AreaInsightsTransport, Callable[..., AreaInsightsTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the area insights client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,AreaInsightsTransport,Callable[..., AreaInsightsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the AreaInsightsTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = AreaInsightsClient._read_environment_variables() + self._client_cert_source = AreaInsightsClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = AreaInsightsClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. 
+ self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, AreaInsightsTransport) + if transport_provided: + # transport is a AreaInsightsTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(AreaInsightsTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or AreaInsightsClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[AreaInsightsTransport], Callable[..., AreaInsightsTransport] + ] = ( + AreaInsightsClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., AreaInsightsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + 
client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def compute_insights( + self, + request: Optional[ + Union[area_insights_service.ComputeInsightsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> area_insights_service.ComputeInsightsResponse: + r"""Compute Insights RPC + + This method lets you retrieve insights about areas using a + variaty of filter such as: area, place type, operating status, + price level and ratings. Currently "count" and "places" insights + are supported. With "count" insights you can answer questions + such as "How many restaurant are located in California that are + operational, are inexpensive and have an average rating of at + least 4 stars" (see ``insight`` enum for more details). With + "places" insights, you can determine which places match the + requested filter. Clients can then use those place resource + names to fetch more details about each individual place using + the Places API. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import areainsights_v1 + + def sample_compute_insights(): + # Create a client + client = areainsights_v1.AreaInsightsClient() + + # Initialize request argument(s) + request = areainsights_v1.ComputeInsightsRequest( + insights=['INSIGHT_PLACES'], + ) + + # Make the request + response = client.compute_insights(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.maps.areainsights_v1.types.ComputeInsightsRequest, dict]): + The request object. Request for the ComputeInsights RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.areainsights_v1.types.ComputeInsightsResponse: + Response for the ComputeInsights RPC. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, area_insights_service.ComputeInsightsRequest): + request = area_insights_service.ComputeInsightsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.compute_insights] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "AreaInsightsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. 
warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AreaInsightsClient",) diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/__init__.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/__init__.py new file mode 100644 index 000000000000..5a454cdd5e69 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AreaInsightsTransport +from .grpc import AreaInsightsGrpcTransport +from .grpc_asyncio import AreaInsightsGrpcAsyncIOTransport +from .rest import AreaInsightsRestInterceptor, AreaInsightsRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[AreaInsightsTransport]] +_transport_registry["grpc"] = AreaInsightsGrpcTransport +_transport_registry["grpc_asyncio"] = AreaInsightsGrpcAsyncIOTransport +_transport_registry["rest"] = AreaInsightsRestTransport + +__all__ = ( + "AreaInsightsTransport", + "AreaInsightsGrpcTransport", + "AreaInsightsGrpcAsyncIOTransport", + "AreaInsightsRestTransport", + "AreaInsightsRestInterceptor", +) diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/base.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/base.py new file mode 100644 index 000000000000..06ab292d9102 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/base.py @@ -0,0 +1,173 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.maps.areainsights_v1 import gapic_version as package_version +from google.maps.areainsights_v1.types import area_insights_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class AreaInsightsTransport(abc.ABC): + """Abstract transport class for AreaInsights.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "areainsights.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'areainsights.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.compute_insights: gapic_v1.method.wrap_method( + self.compute_insights, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def compute_insights( + self, + ) -> Callable[ + [area_insights_service.ComputeInsightsRequest], + Union[ + area_insights_service.ComputeInsightsResponse, + Awaitable[area_insights_service.ComputeInsightsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("AreaInsightsTransport",) diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/grpc.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/grpc.py new file mode 100644 index 000000000000..9f02613a9af3 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/grpc.py @@ -0,0 +1,286 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.maps.areainsights_v1.types import area_insights_service + +from .base import DEFAULT_CLIENT_INFO, AreaInsightsTransport + + +class AreaInsightsGrpcTransport(AreaInsightsTransport): + """gRPC backend transport for AreaInsights. + + Service definition for the Places Insights API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "areainsights.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'areainsights.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "areainsights.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def compute_insights( + self, + ) -> Callable[ + [area_insights_service.ComputeInsightsRequest], + area_insights_service.ComputeInsightsResponse, + ]: + r"""Return a callable for the compute insights method over gRPC. + + Compute Insights RPC + + This method lets you retrieve insights about areas using a + variaty of filter such as: area, place type, operating status, + price level and ratings. Currently "count" and "places" insights + are supported. With "count" insights you can answer questions + such as "How many restaurant are located in California that are + operational, are inexpensive and have an average rating of at + least 4 stars" (see ``insight`` enum for more details). With + "places" insights, you can determine which places match the + requested filter. Clients can then use those place resource + names to fetch more details about each individual place using + the Places API. + + Returns: + Callable[[~.ComputeInsightsRequest], + ~.ComputeInsightsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "compute_insights" not in self._stubs: + self._stubs["compute_insights"] = self.grpc_channel.unary_unary( + "/google.maps.areainsights.v1.AreaInsights/ComputeInsights", + request_serializer=area_insights_service.ComputeInsightsRequest.serialize, + response_deserializer=area_insights_service.ComputeInsightsResponse.deserialize, + ) + return self._stubs["compute_insights"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("AreaInsightsGrpcTransport",) diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/grpc_asyncio.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/grpc_asyncio.py new file mode 100644 index 000000000000..2f5fa839a9ed --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/grpc_asyncio.py @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.maps.areainsights_v1.types import area_insights_service + +from .base import DEFAULT_CLIENT_INFO, AreaInsightsTransport +from .grpc import AreaInsightsGrpcTransport + + +class AreaInsightsGrpcAsyncIOTransport(AreaInsightsTransport): + """gRPC AsyncIO backend transport for AreaInsights. + + Service definition for the Places Insights API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "areainsights.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "areainsights.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'areainsights.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def compute_insights( + self, + ) -> Callable[ + [area_insights_service.ComputeInsightsRequest], + Awaitable[area_insights_service.ComputeInsightsResponse], + ]: + r"""Return a callable for the compute insights method over gRPC. + + Compute Insights RPC + + This method lets you retrieve insights about areas using a + variety of filters such as: area, place type, operating status, + price level and ratings. Currently "count" and "places" insights + are supported. With "count" insights you can answer questions + such as "How many restaurants are located in California that are + operational, are inexpensive and have an average rating of at + least 4 stars" (see ``insight`` enum for more details). With + "places" insights, you can determine which places match the + requested filter. Clients can then use those place resource + names to fetch more details about each individual place using + the Places API. + + Returns: + Callable[[~.ComputeInsightsRequest], + Awaitable[~.ComputeInsightsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
+ if "compute_insights" not in self._stubs: + self._stubs["compute_insights"] = self.grpc_channel.unary_unary( + "/google.maps.areainsights.v1.AreaInsights/ComputeInsights", + request_serializer=area_insights_service.ComputeInsightsRequest.serialize, + response_deserializer=area_insights_service.ComputeInsightsResponse.deserialize, + ) + return self._stubs["compute_insights"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.compute_insights: gapic_v1.method_async.wrap_method( + self.compute_insights, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("AreaInsightsGrpcAsyncIOTransport",) diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/rest.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/rest.py new file mode 100644 index 000000000000..c1fa1d2c5628 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/rest.py @@ -0,0 +1,313 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.maps.areainsights_v1.types import area_insights_service + +from .base import AreaInsightsTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AreaInsightsRestInterceptor: + """Interceptor for AreaInsights. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AreaInsightsRestTransport. + + .. 
code-block:: python + class MyCustomAreaInsightsInterceptor(AreaInsightsRestInterceptor): + def pre_compute_insights(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_compute_insights(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AreaInsightsRestTransport(interceptor=MyCustomAreaInsightsInterceptor()) + client = AreaInsightsClient(transport=transport) + + + """ + + def pre_compute_insights( + self, + request: area_insights_service.ComputeInsightsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[area_insights_service.ComputeInsightsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for compute_insights + + Override in a subclass to manipulate the request or metadata + before they are sent to the AreaInsights server. + """ + return request, metadata + + def post_compute_insights( + self, response: area_insights_service.ComputeInsightsResponse + ) -> area_insights_service.ComputeInsightsResponse: + """Post-rpc interceptor for compute_insights + + Override in a subclass to manipulate the response + after it is returned by the AreaInsights server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AreaInsightsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AreaInsightsRestInterceptor + + +class AreaInsightsRestTransport(AreaInsightsTransport): + """REST backend transport for AreaInsights. + + Service definition for the Places Insights API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "areainsights.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[AreaInsightsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'areainsights.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or AreaInsightsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _ComputeInsights(AreaInsightsRestStub): + def __hash__(self): + return hash("ComputeInsights") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: area_insights_service.ComputeInsightsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> area_insights_service.ComputeInsightsResponse: + r"""Call the compute insights method over HTTP. 
+ + Args: + request (~.area_insights_service.ComputeInsightsRequest): + The request object. Request for the ComputeInsights RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.area_insights_service.ComputeInsightsResponse: + Response for the ComputeInsights RPC. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1:computeInsights", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_compute_insights( + request, metadata + ) + pb_request = area_insights_service.ComputeInsightsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = area_insights_service.ComputeInsightsResponse() + pb_resp = area_insights_service.ComputeInsightsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_compute_insights(resp) + return resp + + @property + def compute_insights( + self, + ) -> Callable[ + [area_insights_service.ComputeInsightsRequest], + area_insights_service.ComputeInsightsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ComputeInsights(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("AreaInsightsRestTransport",) diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/types/__init__.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/types/__init__.py new file mode 100644 index 000000000000..52bf7638e04e --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/types/__init__.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .area_insights_service import ( + ComputeInsightsRequest, + ComputeInsightsResponse, + Filter, + Insight, + LocationFilter, + OperatingStatus, + PlaceInsight, + PriceLevel, + RatingFilter, + TypeFilter, +) + +__all__ = ( + "ComputeInsightsRequest", + "ComputeInsightsResponse", + "Filter", + "LocationFilter", + "PlaceInsight", + "RatingFilter", + "TypeFilter", + "Insight", + "OperatingStatus", + "PriceLevel", +) diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/types/area_insights_service.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/types/area_insights_service.py new file mode 100644 index 000000000000..fa6d48a92d48 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/types/area_insights_service.py @@ -0,0 +1,510 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.type import latlng_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.maps.areainsights.v1", + manifest={ + "Insight", + "OperatingStatus", + "PriceLevel", + "ComputeInsightsRequest", + "ComputeInsightsResponse", + "PlaceInsight", + "Filter", + "LocationFilter", + "TypeFilter", + "RatingFilter", + }, +) + + +class Insight(proto.Enum): + r"""Supported insights. + + Values: + INSIGHT_UNSPECIFIED (0): + Not Specified. 
+ INSIGHT_COUNT (1): + Count insight. + + When this insight is specified ComputeInsights returns the + number of places that match the specified filter criteria. + + :: + + For example if the request is: + ComputeInsightsRequest { + insights: INSIGHT_COUNT + filter { + location_filter {region: } + type_filter {included_types: "restaurant"} + operating_status: OPERATING_STATUS_OPERATIONAL + price_levels: PRICE_LEVEL_FREE + price_levels: PRICE_LEVEL_INEXPENSIVE + min_rating: 4.0 + } + } + + The method will return the count of restaurants in California that are + operational, with price level free or inexpensive and have an average + rating of at least 4 stars. + + Example response: + ComputeInsightsResponse { + count: + } + INSIGHT_PLACES (2): + Return Places + + When this insight is specified ComputeInsights returns + Places that match the specified filter criteria. + + :: + + For example if the request is: + ComputeInsightsRequest { + insights: INSIGHT_PLACES + filter { + location_filter {region: } + type_filter {included_types: "restaurant"} + operating_status: OPERATING_STATUS_OPERATIONAL + price_levels: PRICE_LEVEL_FREE + price_levels: PRICE_LEVEL_INEXPENSIVE + min_rating: 4.0 + } + } + + The method will return a list of places of restaurants in + California that are operational, with price level free or inexpensive and + have an average rating of at least 4 stars. + + Example response: + ComputeInsightsResponse { + place_insights { place: "places/ABC" } + place_insights { place: "places/PQR" } + place_insights { place: "places/XYZ" } + } + """ + INSIGHT_UNSPECIFIED = 0 + INSIGHT_COUNT = 1 + INSIGHT_PLACES = 2 + + +class OperatingStatus(proto.Enum): + r"""Operating status of the place. + + Values: + OPERATING_STATUS_UNSPECIFIED (0): + Not Specified. + OPERATING_STATUS_OPERATIONAL (1): + The place is operational and it's open during + its defined hours. + OPERATING_STATUS_PERMANENTLY_CLOSED (3): + The Place is no longer in business.
+ OPERATING_STATUS_TEMPORARILY_CLOSED (4): + The Place is temporarily closed and expected + to reopen in the future. + """ + OPERATING_STATUS_UNSPECIFIED = 0 + OPERATING_STATUS_OPERATIONAL = 1 + OPERATING_STATUS_PERMANENTLY_CLOSED = 3 + OPERATING_STATUS_TEMPORARILY_CLOSED = 4 + + +class PriceLevel(proto.Enum): + r"""Price level of the place. + + Values: + PRICE_LEVEL_UNSPECIFIED (0): + Place price level is unspecified or unknown. + PRICE_LEVEL_FREE (1): + Place provides free services. + PRICE_LEVEL_INEXPENSIVE (2): + Place provides inexpensive services. + PRICE_LEVEL_MODERATE (3): + Place provides moderately priced services. + PRICE_LEVEL_EXPENSIVE (4): + Place provides expensive services. + PRICE_LEVEL_VERY_EXPENSIVE (5): + Place provides very expensive services. + """ + PRICE_LEVEL_UNSPECIFIED = 0 + PRICE_LEVEL_FREE = 1 + PRICE_LEVEL_INEXPENSIVE = 2 + PRICE_LEVEL_MODERATE = 3 + PRICE_LEVEL_EXPENSIVE = 4 + PRICE_LEVEL_VERY_EXPENSIVE = 5 + + +class ComputeInsightsRequest(proto.Message): + r"""Request for the ComputeInsights RPC. + + Attributes: + insights (MutableSequence[google.maps.areainsights_v1.types.Insight]): + Required. Insights to compute. Currently only INSIGHT_COUNT + and INSIGHT_PLACES are supported. + filter (google.maps.areainsights_v1.types.Filter): + Required. Insight filter. + """ + + insights: MutableSequence["Insight"] = proto.RepeatedField( + proto.ENUM, + number=4, + enum="Insight", + ) + filter: "Filter" = proto.Field( + proto.MESSAGE, + number=5, + message="Filter", + ) + + +class ComputeInsightsResponse(proto.Message): + r"""Response for the ComputeInsights RPC. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + count (int): + Result for Insights.INSIGHT_COUNT. + + This field is a member of `oneof`_ ``_count``. + place_insights (MutableSequence[google.maps.areainsights_v1.types.PlaceInsight]): + Result for Insights.INSIGHT_PLACES. 
+ """ + + count: int = proto.Field( + proto.INT64, + number=1, + optional=True, + ) + place_insights: MutableSequence["PlaceInsight"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="PlaceInsight", + ) + + +class PlaceInsight(proto.Message): + r"""Holds information about a place + + Attributes: + place (str): + The resource name of a place. This resource name can be used + to retrieve details about the place using the `Places + API `__. + """ + + place: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Filter(proto.Message): + r"""Filters for the ComputeInsights RPC. + + Attributes: + location_filter (google.maps.areainsights_v1.types.LocationFilter): + Required. Restricts results to places which + are located in the area specified by location + filters. + type_filter (google.maps.areainsights_v1.types.TypeFilter): + Required. Place type filters. + operating_status (MutableSequence[google.maps.areainsights_v1.types.OperatingStatus]): + Optional. Restricts results to places whose operating status + is included on this list. If operating_status is not set, + OPERATING_STATUS_OPERATIONAL is used as default. + price_levels (MutableSequence[google.maps.areainsights_v1.types.PriceLevel]): + Optional. Restricts results to places whose price level is + included on this list. If price_level is not set, all price + levels are included in the results. + rating_filter (google.maps.areainsights_v1.types.RatingFilter): + Optional. Restricts results to places whose average user + ratings are in the range specified by rating_filter. If + rating_filter is not set, all ratings are included in the + result. 
+ """ + + location_filter: "LocationFilter" = proto.Field( + proto.MESSAGE, + number=1, + message="LocationFilter", + ) + type_filter: "TypeFilter" = proto.Field( + proto.MESSAGE, + number=2, + message="TypeFilter", + ) + operating_status: MutableSequence["OperatingStatus"] = proto.RepeatedField( + proto.ENUM, + number=3, + enum="OperatingStatus", + ) + price_levels: MutableSequence["PriceLevel"] = proto.RepeatedField( + proto.ENUM, + number=4, + enum="PriceLevel", + ) + rating_filter: "RatingFilter" = proto.Field( + proto.MESSAGE, + number=5, + message="RatingFilter", + ) + + +class LocationFilter(proto.Message): + r"""Location filters. + + Specifies the area of interest for the insight. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + circle (google.maps.areainsights_v1.types.LocationFilter.Circle): + Area as a circle. + + This field is a member of `oneof`_ ``area``. + region (google.maps.areainsights_v1.types.LocationFilter.Region): + Area as region. + + This field is a member of `oneof`_ ``area``. + custom_area (google.maps.areainsights_v1.types.LocationFilter.CustomArea): + Custom area specified by a polygon. + + This field is a member of `oneof`_ ``area``. + """ + + class Circle(proto.Message): + r"""A circle is defined by a center point and radius in meters. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + lat_lng (google.type.latlng_pb2.LatLng): + The latitude and longitude of the center of + the circle. + + This field is a member of `oneof`_ ``center``. + place (str): + The Place resource name of the center of the + circle. Only point places are supported. + + This field is a member of `oneof`_ ``center``. + radius (int): + Optional. The radius of the circle in meters + """ + + lat_lng: latlng_pb2.LatLng = proto.Field( + proto.MESSAGE, + number=1, + oneof="center", + message=latlng_pb2.LatLng, + ) + place: str = proto.Field( + proto.STRING, + number=2, + oneof="center", + ) + radius: int = proto.Field( + proto.INT32, + number=3, + ) + + class Region(proto.Message): + r"""A region is a geographic boundary such as: cities, postal + codes, counties, states, etc. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + place (str): + The Place resource name of a region. + + This field is a member of `oneof`_ ``region``. + """ + + place: str = proto.Field( + proto.STRING, + number=1, + oneof="region", + ) + + class CustomArea(proto.Message): + r"""Custom Area. + + Attributes: + polygon (google.maps.areainsights_v1.types.LocationFilter.CustomArea.Polygon): + Required. The custom area represented as a + polygon + """ + + class Polygon(proto.Message): + r"""A polygon is represented by a series of connected coordinates + in an counterclockwise ordered sequence. The coordinates form a + closed loop and define a filled region. The first and last + coordinates are equivalent, and they must contain identical + values. The format is a simplified version of GeoJSON polygons + (we only support one counterclockwise exterior ring). + + Attributes: + coordinates (MutableSequence[google.type.latlng_pb2.LatLng]): + Optional. The coordinates that define the + polygon. 
+ """ + + coordinates: MutableSequence[latlng_pb2.LatLng] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=latlng_pb2.LatLng, + ) + + polygon: "LocationFilter.CustomArea.Polygon" = proto.Field( + proto.MESSAGE, + number=1, + message="LocationFilter.CustomArea.Polygon", + ) + + circle: Circle = proto.Field( + proto.MESSAGE, + number=1, + oneof="area", + message=Circle, + ) + region: Region = proto.Field( + proto.MESSAGE, + number=2, + oneof="area", + message=Region, + ) + custom_area: CustomArea = proto.Field( + proto.MESSAGE, + number=3, + oneof="area", + message=CustomArea, + ) + + +class TypeFilter(proto.Message): + r"""Place type filters. + + Only Place types from `Table + a `__ + are supported. + + A place can only have a single primary type associated with it. For + example, the primary type might be "mexican_restaurant" or + "steak_house". Use included_primary_types and excluded_primary_types + to filter the results on a place's primary type. + + A place can also have multiple type values associated with it. For + example a restaurant might have the following types: + "seafood_restaurant", "restaurant", "food", "point_of_interest", + "establishment". Use included_types and excluded_types to filter the + results on the list of types associated with a place. + + If a search is specified with multiple type restrictions, only + places that satisfy all of the restrictions are returned. For + example, if you specify {"included_types": ["restaurant"], + "excluded_primary_types": ["steak_house"]}, the returned places + provide "restaurant" related services but do not operate primarily + as a "steak_house". + + If there are any conflicting types, i.e. a type appears in both + included_types and excluded_types types or included_primary_types + and excluded_primary_types, an INVALID_ARGUMENT error is returned. + + One of included_types or included_primary_types must be set. + + Attributes: + included_types (MutableSequence[str]): + Optional. 
Included Place types. + excluded_types (MutableSequence[str]): + Optional. Excluded Place types. + included_primary_types (MutableSequence[str]): + Optional. Included primary Place types. + excluded_primary_types (MutableSequence[str]): + Optional. Excluded primary Place types. + """ + + included_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + excluded_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + included_primary_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + excluded_primary_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + +class RatingFilter(proto.Message): + r"""Average user rating filters. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + min_rating (float): + Optional. Restricts results to places whose average user + rating is greater than or equal to min_rating. Values must + be between 1.0 and 5.0. + + This field is a member of `oneof`_ ``_min_rating``. + max_rating (float): + Optional. Restricts results to places whose average user + rating is less than or equal to max_rating. Values + must be between 1.0 and 5.0. + + This field is a member of `oneof`_ ``_max_rating``.
+ """ + + min_rating: float = proto.Field( + proto.FLOAT, + number=5, + optional=True, + ) + max_rating: float = proto.Field( + proto.FLOAT, + number=6, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-areainsights/mypy.ini b/packages/google-maps-areainsights/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-maps-areainsights/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-maps-areainsights/noxfile.py b/packages/google-maps-areainsights/noxfile.py new file mode 100644 index 000000000000..aeee7851401a --- /dev/null +++ b/packages/google-maps-areainsights/noxfile.py @@ -0,0 +1,452 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. 
Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. 
+ + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+ with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "grpc-google-iam-v1", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-maps-areainsights/samples/generated_samples/areainsights_v1_generated_area_insights_compute_insights_async.py b/packages/google-maps-areainsights/samples/generated_samples/areainsights_v1_generated_area_insights_compute_insights_async.py new file mode 100644 index 000000000000..880f4b4385cc --- /dev/null +++ b/packages/google-maps-areainsights/samples/generated_samples/areainsights_v1_generated_area_insights_compute_insights_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ComputeInsights +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-areainsights + + +# [START areainsights_v1_generated_AreaInsights_ComputeInsights_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import areainsights_v1 + + +async def sample_compute_insights(): + # Create a client + client = areainsights_v1.AreaInsightsAsyncClient() + + # Initialize request argument(s) + request = areainsights_v1.ComputeInsightsRequest( + insights=['INSIGHT_PLACES'], + ) + + # Make the request + response = await client.compute_insights(request=request) + + # Handle the response + print(response) + +# [END areainsights_v1_generated_AreaInsights_ComputeInsights_async] diff --git a/packages/google-maps-areainsights/samples/generated_samples/areainsights_v1_generated_area_insights_compute_insights_sync.py b/packages/google-maps-areainsights/samples/generated_samples/areainsights_v1_generated_area_insights_compute_insights_sync.py new file mode 100644 index 000000000000..cbf41464d3b3 --- /dev/null +++ b/packages/google-maps-areainsights/samples/generated_samples/areainsights_v1_generated_area_insights_compute_insights_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ComputeInsights +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-areainsights + + +# [START areainsights_v1_generated_AreaInsights_ComputeInsights_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import areainsights_v1 + + +def sample_compute_insights(): + # Create a client + client = areainsights_v1.AreaInsightsClient() + + # Initialize request argument(s) + request = areainsights_v1.ComputeInsightsRequest( + insights=['INSIGHT_PLACES'], + ) + + # Make the request + response = client.compute_insights(request=request) + + # Handle the response + print(response) + +# [END areainsights_v1_generated_AreaInsights_ComputeInsights_sync] diff --git a/packages/google-maps-areainsights/samples/generated_samples/snippet_metadata_google.maps.areainsights.v1.json b/packages/google-maps-areainsights/samples/generated_samples/snippet_metadata_google.maps.areainsights.v1.json new file mode 100644 index 000000000000..508e4607c0f8 --- /dev/null +++ b/packages/google-maps-areainsights/samples/generated_samples/snippet_metadata_google.maps.areainsights.v1.json @@ -0,0 +1,168 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.maps.areainsights.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-maps-areainsights", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.maps.areainsights_v1.AreaInsightsAsyncClient", + "shortName": "AreaInsightsAsyncClient" + }, + "fullName": "google.maps.areainsights_v1.AreaInsightsAsyncClient.compute_insights", + "method": { + 
"fullName": "google.maps.areainsights.v1.AreaInsights.ComputeInsights", + "service": { + "fullName": "google.maps.areainsights.v1.AreaInsights", + "shortName": "AreaInsights" + }, + "shortName": "ComputeInsights" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.areainsights_v1.types.ComputeInsightsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.maps.areainsights_v1.types.ComputeInsightsResponse", + "shortName": "compute_insights" + }, + "description": "Sample for ComputeInsights", + "file": "areainsights_v1_generated_area_insights_compute_insights_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "areainsights_v1_generated_AreaInsights_ComputeInsights_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "areainsights_v1_generated_area_insights_compute_insights_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.maps.areainsights_v1.AreaInsightsClient", + "shortName": "AreaInsightsClient" + }, + "fullName": "google.maps.areainsights_v1.AreaInsightsClient.compute_insights", + "method": { + "fullName": "google.maps.areainsights.v1.AreaInsights.ComputeInsights", + "service": { + "fullName": "google.maps.areainsights.v1.AreaInsights", + "shortName": "AreaInsights" + }, + "shortName": "ComputeInsights" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.areainsights_v1.types.ComputeInsightsRequest" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.maps.areainsights_v1.types.ComputeInsightsResponse", + "shortName": "compute_insights" + }, + "description": "Sample for ComputeInsights", + "file": "areainsights_v1_generated_area_insights_compute_insights_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "areainsights_v1_generated_AreaInsights_ComputeInsights_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "areainsights_v1_generated_area_insights_compute_insights_sync.py" + } + ] +} diff --git a/packages/google-maps-areainsights/scripts/decrypt-secrets.sh b/packages/google-maps-areainsights/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..120b0ddc4364 --- /dev/null +++ b/packages/google-maps-areainsights/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2024 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-maps-areainsights/scripts/fixup_areainsights_v1_keywords.py b/packages/google-maps-areainsights/scripts/fixup_areainsights_v1_keywords.py new file mode 100644 index 000000000000..2b35b82f7b4e --- /dev/null +++ b/packages/google-maps-areainsights/scripts/fixup_areainsights_v1_keywords.py @@ -0,0 +1,176 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class areainsightsCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'compute_insights': ('insights', 'filter', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=areainsightsCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the areainsights client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-maps-areainsights/setup.py b/packages/google-maps-areainsights/setup.py new file mode 100644 index 000000000000..70e743f355eb --- /dev/null +++ b/packages/google-maps-areainsights/setup.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-maps-areainsights" + + +description = "Google Maps Areainsights API client library" + +version = None + +with open( + os.path.join(package_root, "google/maps/areainsights/gapic_version.py") +) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert len(version_candidates) == 1 + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-areainsights" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + 
version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-maps-areainsights/testing/.gitignore b/packages/google-maps-areainsights/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-maps-areainsights/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-maps-areainsights/testing/constraints-3.10.txt b/packages/google-maps-areainsights/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-maps-areainsights/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-maps-areainsights/testing/constraints-3.11.txt b/packages/google-maps-areainsights/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-maps-areainsights/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-maps-areainsights/testing/constraints-3.12.txt b/packages/google-maps-areainsights/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-maps-areainsights/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-maps-areainsights/testing/constraints-3.7.txt b/packages/google-maps-areainsights/testing/constraints-3.7.txt new file mode 100644 index 000000000000..fc812592b0ee --- /dev/null +++ b/packages/google-maps-areainsights/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/packages/google-maps-areainsights/testing/constraints-3.8.txt b/packages/google-maps-areainsights/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-maps-areainsights/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-maps-areainsights/testing/constraints-3.9.txt b/packages/google-maps-areainsights/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-maps-areainsights/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-maps-areainsights/tests/__init__.py b/packages/google-maps-areainsights/tests/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-maps-areainsights/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-maps-areainsights/tests/unit/__init__.py b/packages/google-maps-areainsights/tests/unit/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-maps-areainsights/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-maps-areainsights/tests/unit/gapic/__init__.py b/packages/google-maps-areainsights/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-maps-areainsights/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-maps-areainsights/tests/unit/gapic/areainsights_v1/__init__.py b/packages/google-maps-areainsights/tests/unit/gapic/areainsights_v1/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-maps-areainsights/tests/unit/gapic/areainsights_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-maps-areainsights/tests/unit/gapic/areainsights_v1/test_area_insights.py b/packages/google-maps-areainsights/tests/unit/gapic/areainsights_v1/test_area_insights.py new file mode 100644 index 000000000000..f82db5638443 --- /dev/null +++ b/packages/google-maps-areainsights/tests/unit/gapic/areainsights_v1/test_area_insights.py @@ -0,0 +1,2315 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +from google.type import latlng_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.maps.areainsights_v1.services.area_insights import ( + AreaInsightsAsyncClient, + AreaInsightsClient, + transports, +) +from google.maps.areainsights_v1.types import area_insights_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AreaInsightsClient._get_default_mtls_endpoint(None) is None + assert ( + AreaInsightsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + AreaInsightsClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + AreaInsightsClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AreaInsightsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert AreaInsightsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert AreaInsightsClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert AreaInsightsClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert AreaInsightsClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + AreaInsightsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert AreaInsightsClient._read_environment_variables() == ( + 
False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert AreaInsightsClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert AreaInsightsClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + AreaInsightsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert AreaInsightsClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert AreaInsightsClient._get_client_cert_source(None, False) is None + assert ( + AreaInsightsClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + AreaInsightsClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + AreaInsightsClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + AreaInsightsClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + AreaInsightsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AreaInsightsClient), +) +@mock.patch.object( + AreaInsightsAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + 
modify_default_endpoint_template(AreaInsightsAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = AreaInsightsClient._DEFAULT_UNIVERSE + default_endpoint = AreaInsightsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = AreaInsightsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + AreaInsightsClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + AreaInsightsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == AreaInsightsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AreaInsightsClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + AreaInsightsClient._get_api_endpoint(None, None, default_universe, "always") + == AreaInsightsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AreaInsightsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == AreaInsightsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AreaInsightsClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + AreaInsightsClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + AreaInsightsClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + AreaInsightsClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + AreaInsightsClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + AreaInsightsClient._get_universe_domain(None, None) + == AreaInsightsClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + AreaInsightsClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AreaInsightsClient, transports.AreaInsightsGrpcTransport, "grpc"), + (AreaInsightsClient, transports.AreaInsightsRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AreaInsightsClient, "grpc"), + (AreaInsightsAsyncClient, "grpc_asyncio"), + (AreaInsightsClient, "rest"), + ], +) +def test_area_insights_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "areainsights.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://areainsights.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.AreaInsightsGrpcTransport, "grpc"), + (transports.AreaInsightsGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AreaInsightsRestTransport, "rest"), + ], +) +def 
test_area_insights_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AreaInsightsClient, "grpc"), + (AreaInsightsAsyncClient, "grpc_asyncio"), + (AreaInsightsClient, "rest"), + ], +) +def test_area_insights_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "areainsights.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://areainsights.googleapis.com" + ) + + +def test_area_insights_client_get_transport_class(): + transport = AreaInsightsClient.get_transport_class() + available_transports = [ + transports.AreaInsightsGrpcTransport, + transports.AreaInsightsRestTransport, + ] + assert transport in available_transports + + transport = 
AreaInsightsClient.get_transport_class("grpc") + assert transport == transports.AreaInsightsGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AreaInsightsClient, transports.AreaInsightsGrpcTransport, "grpc"), + ( + AreaInsightsAsyncClient, + transports.AreaInsightsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AreaInsightsClient, transports.AreaInsightsRestTransport, "rest"), + ], +) +@mock.patch.object( + AreaInsightsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AreaInsightsClient), +) +@mock.patch.object( + AreaInsightsAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AreaInsightsAsyncClient), +) +def test_area_insights_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(AreaInsightsClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AreaInsightsClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + 
always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (AreaInsightsClient, transports.AreaInsightsGrpcTransport, "grpc", "true"), + ( + AreaInsightsAsyncClient, + transports.AreaInsightsGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (AreaInsightsClient, transports.AreaInsightsGrpcTransport, "grpc", "false"), + ( + AreaInsightsAsyncClient, + transports.AreaInsightsGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (AreaInsightsClient, transports.AreaInsightsRestTransport, "rest", "true"), + (AreaInsightsClient, transports.AreaInsightsRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + AreaInsightsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AreaInsightsClient), +) +@mock.patch.object( + AreaInsightsAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AreaInsightsAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_area_insights_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [AreaInsightsClient, AreaInsightsAsyncClient]) +@mock.patch.object( + AreaInsightsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AreaInsightsClient) +) +@mock.patch.object( + AreaInsightsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AreaInsightsAsyncClient), +) +def test_area_insights_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [AreaInsightsClient, AreaInsightsAsyncClient]) +@mock.patch.object( + AreaInsightsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AreaInsightsClient), +) +@mock.patch.object( + AreaInsightsAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AreaInsightsAsyncClient), +) +def test_area_insights_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = AreaInsightsClient._DEFAULT_UNIVERSE + default_endpoint = AreaInsightsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = AreaInsightsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AreaInsightsClient, transports.AreaInsightsGrpcTransport, "grpc"), + ( + AreaInsightsAsyncClient, + transports.AreaInsightsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AreaInsightsClient, transports.AreaInsightsRestTransport, "rest"), + ], +) +def test_area_insights_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AreaInsightsClient, + transports.AreaInsightsGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AreaInsightsAsyncClient, + transports.AreaInsightsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (AreaInsightsClient, transports.AreaInsightsRestTransport, "rest", None), + ], +) +def test_area_insights_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials 
file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_area_insights_client_client_options_from_dict(): + with mock.patch( + "google.maps.areainsights_v1.services.area_insights.transports.AreaInsightsGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = AreaInsightsClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AreaInsightsClient, + transports.AreaInsightsGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AreaInsightsAsyncClient, + transports.AreaInsightsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_area_insights_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "areainsights.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="areainsights.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + area_insights_service.ComputeInsightsRequest, + dict, + ], +) +def test_compute_insights(request_type, transport: str = "grpc"): + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and 
we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.compute_insights), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = area_insights_service.ComputeInsightsResponse( + count=553, + ) + response = client.compute_insights(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = area_insights_service.ComputeInsightsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, area_insights_service.ComputeInsightsResponse) + assert response.count == 553 + + +def test_compute_insights_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.compute_insights), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.compute_insights() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == area_insights_service.ComputeInsightsRequest() + + +def test_compute_insights_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = area_insights_service.ComputeInsightsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.compute_insights), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.compute_insights(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == area_insights_service.ComputeInsightsRequest() + + +def test_compute_insights_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.compute_insights in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.compute_insights + ] = mock_rpc + request = {} + client.compute_insights(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.compute_insights(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_compute_insights_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AreaInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.compute_insights), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + area_insights_service.ComputeInsightsResponse( + count=553, + ) + ) + response = await client.compute_insights() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == area_insights_service.ComputeInsightsRequest() + + +@pytest.mark.asyncio +async def test_compute_insights_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AreaInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.compute_insights + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.compute_insights + ] = mock_rpc + + request = 
{} + await client.compute_insights(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.compute_insights(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_compute_insights_async( + transport: str = "grpc_asyncio", + request_type=area_insights_service.ComputeInsightsRequest, +): + client = AreaInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.compute_insights), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + area_insights_service.ComputeInsightsResponse( + count=553, + ) + ) + response = await client.compute_insights(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = area_insights_service.ComputeInsightsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, area_insights_service.ComputeInsightsResponse) + assert response.count == 553 + + +@pytest.mark.asyncio +async def test_compute_insights_async_from_dict(): + await test_compute_insights_async(request_type=dict) + + +@pytest.mark.parametrize( + "request_type", + [ + area_insights_service.ComputeInsightsRequest, + dict, + ], +) +def test_compute_insights_rest(request_type): + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = area_insights_service.ComputeInsightsResponse( + count=553, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = area_insights_service.ComputeInsightsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.compute_insights(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, area_insights_service.ComputeInsightsResponse) + assert response.count == 553 + + +def test_compute_insights_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.compute_insights in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.compute_insights + ] = mock_rpc + + request = {} + client.compute_insights(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.compute_insights(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_compute_insights_rest_required_fields( + request_type=area_insights_service.ComputeInsightsRequest, +): + transport_class = transports.AreaInsightsRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).compute_insights._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).compute_insights._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = area_insights_service.ComputeInsightsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = area_insights_service.ComputeInsightsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.compute_insights(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_compute_insights_rest_unset_required_fields(): + transport = transports.AreaInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.compute_insights._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "insights", + "filter", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_compute_insights_rest_interceptors(null_interceptor): + transport = transports.AreaInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AreaInsightsRestInterceptor(), + ) + client = AreaInsightsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AreaInsightsRestInterceptor, "post_compute_insights" + ) as post, mock.patch.object( + transports.AreaInsightsRestInterceptor, "pre_compute_insights" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = area_insights_service.ComputeInsightsRequest.pb( + 
area_insights_service.ComputeInsightsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + area_insights_service.ComputeInsightsResponse.to_json( + area_insights_service.ComputeInsightsResponse() + ) + ) + + request = area_insights_service.ComputeInsightsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = area_insights_service.ComputeInsightsResponse() + + client.compute_insights( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_compute_insights_rest_bad_request( + transport: str = "rest", request_type=area_insights_service.ComputeInsightsRequest +): + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.compute_insights(request) + + +def test_compute_insights_rest_error(): + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.AreaInsightsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AreaInsightsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AreaInsightsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.AreaInsightsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AreaInsightsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AreaInsightsClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AreaInsightsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AreaInsightsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.AreaInsightsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AreaInsightsClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.AreaInsightsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AreaInsightsGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AreaInsightsGrpcTransport, + transports.AreaInsightsGrpcAsyncIOTransport, + transports.AreaInsightsRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = AreaInsightsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.AreaInsightsGrpcTransport, + ) + + +def test_area_insights_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AreaInsightsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_area_insights_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.maps.areainsights_v1.services.area_insights.transports.AreaInsightsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.AreaInsightsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ("compute_insights",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_area_insights_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.maps.areainsights_v1.services.area_insights.transports.AreaInsightsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AreaInsightsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_area_insights_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.maps.areainsights_v1.services.area_insights.transports.AreaInsightsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AreaInsightsTransport() + adc.assert_called_once() + + +def test_area_insights_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AreaInsightsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AreaInsightsGrpcTransport, + transports.AreaInsightsGrpcAsyncIOTransport, + ], +) +def test_area_insights_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AreaInsightsGrpcTransport, + transports.AreaInsightsGrpcAsyncIOTransport, + transports.AreaInsightsRestTransport, + ], +) +def test_area_insights_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AreaInsightsGrpcTransport, grpc_helpers), + (transports.AreaInsightsGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_area_insights_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "areainsights.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="areainsights.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.AreaInsightsGrpcTransport, transports.AreaInsightsGrpcAsyncIOTransport], +) +def test_area_insights_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_area_insights_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.AreaInsightsRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_area_insights_host_no_port(transport_name): + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="areainsights.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "areainsights.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://areainsights.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_area_insights_host_with_port(transport_name): + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="areainsights.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "areainsights.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://areainsights.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( 
+ "transport_name", + [ + "rest", + ], +) +def test_area_insights_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = AreaInsightsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AreaInsightsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.compute_insights._session + session2 = client2.transport.compute_insights._session + assert session1 != session2 + + +def test_area_insights_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AreaInsightsGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_area_insights_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AreaInsightsGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [transports.AreaInsightsGrpcTransport, transports.AreaInsightsGrpcAsyncIOTransport], +) +def test_area_insights_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [transports.AreaInsightsGrpcTransport, transports.AreaInsightsGrpcAsyncIOTransport], +) +def test_area_insights_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_place_path(): + place_id = "squid" + expected = "places/{place_id}".format( + place_id=place_id, + ) + actual = AreaInsightsClient.place_path(place_id) + assert expected == actual + + +def test_parse_place_path(): + expected = { + "place_id": "clam", + } + path = AreaInsightsClient.place_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AreaInsightsClient.parse_place_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = AreaInsightsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = AreaInsightsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = AreaInsightsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = AreaInsightsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = AreaInsightsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = AreaInsightsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = AreaInsightsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = AreaInsightsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AreaInsightsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = AreaInsightsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = AreaInsightsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = AreaInsightsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = AreaInsightsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = AreaInsightsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AreaInsightsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.AreaInsightsTransport, "_prep_wrapped_messages" + ) as prep: + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.AreaInsightsTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = AreaInsightsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = AreaInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (AreaInsightsClient, transports.AreaInsightsGrpcTransport), + (AreaInsightsAsyncClient, transports.AreaInsightsGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/release-please-config.json b/release-please-config.json index b2f26908453f..c33b2c671761 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -3490,6 +3490,16 @@ } ], "release-type": "python" + }, + "packages/google-maps-areainsights": { + "extra-files": [ + "google/maps/areainsights_v1/gapic_version.py", + { + "type": "json", + "path": "samples/generated_samples/snippet_metadata_google.maps.areainsights.v1.json", + "jsonpath": "$.clientLibrary.version" + } + ] } } } From 1f7011b9f31a26e3a415f31c2533758f453dcb3d Mon Sep 17 00:00:00 2001 From: yoshi-code-bot <70984784+yoshi-code-bot@users.noreply.github.com> Date: Thu, 12 Sep 2024 13:58:41 -0700 Subject: [PATCH 12/59] chore: Update release-please config files (#13080) Update release-please config files --- 
.release-please-manifest.json | 1 + release-please-config.json | 25 +++++++++++++++---------- 2 files changed, 16 insertions(+), 10 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index a23882e898d2..cc2385c7dd15 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -178,6 +178,7 @@ "packages/google-cloud-workstations": "0.5.8", "packages/google-geo-type": "0.3.9", "packages/google-maps-addressvalidation": "0.3.13", + "packages/google-maps-areainsights": "0.0.0", "packages/google-maps-fleetengine": "0.2.2", "packages/google-maps-fleetengine-delivery": "0.2.4", "packages/google-maps-mapsplatformdatasets": "0.4.2", diff --git a/release-please-config.json b/release-please-config.json index c33b2c671761..ca70cf6baec5 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -3197,6 +3197,21 @@ ], "release-type": "python" }, + "packages/google-maps-areainsights": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-maps-areainsights", + "extra-files": [ + "google/maps/areainsights/gapic_version.py", + "google/maps/areainsights_v1/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.maps.areainsights.v1.json", + "type": "json" + } + ], + "release-type": "python" + }, "packages/google-maps-fleetengine": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, @@ -3490,16 +3505,6 @@ } ], "release-type": "python" - }, - "packages/google-maps-areainsights": { - "extra-files": [ - "google/maps/areainsights_v1/gapic_version.py", - { - "type": "json", - "path": "samples/generated_samples/snippet_metadata_google.maps.areainsights.v1.json", - "jsonpath": "$.clientLibrary.version" - } - ] } } } From 77b64f5739e1599d0d4f6860fc668e1e521168f4 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 12 Sep 
2024 14:18:51 -0700 Subject: [PATCH 13/59] chore: release main (#13082) :robot: I have created a release *beep* *boop* ---
google-maps-areainsights: 0.1.0 ## 0.1.0 (2024-09-12) ### Features * add initial files for google.maps.areainsights.v1 ([#13078](https://github.com/googleapis/google-cloud-python/issues/13078)) ([ae58345](https://github.com/googleapis/google-cloud-python/commit/ae583456fd0a2f2e46ce8759918ebe3fc19a0fe8))
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- packages/google-maps-areainsights/CHANGELOG.md | 11 ++++++++++- .../google/maps/areainsights/gapic_version.py | 2 +- .../google/maps/areainsights_v1/gapic_version.py | 2 +- 4 files changed, 13 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index cc2385c7dd15..2a86228c483b 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -178,7 +178,7 @@ "packages/google-cloud-workstations": "0.5.8", "packages/google-geo-type": "0.3.9", "packages/google-maps-addressvalidation": "0.3.13", - "packages/google-maps-areainsights": "0.0.0", + "packages/google-maps-areainsights": "0.1.0", "packages/google-maps-fleetengine": "0.2.2", "packages/google-maps-fleetengine-delivery": "0.2.4", "packages/google-maps-mapsplatformdatasets": "0.4.2", diff --git a/packages/google-maps-areainsights/CHANGELOG.md b/packages/google-maps-areainsights/CHANGELOG.md index 5ddad421e08f..2f417509fcba 100644 --- a/packages/google-maps-areainsights/CHANGELOG.md +++ b/packages/google-maps-areainsights/CHANGELOG.md @@ -1 +1,10 @@ -# Changelog \ No newline at end of file +# Changelog + +## 0.1.0 (2024-09-12) + + +### Features + +* add initial files for google.maps.areainsights.v1 ([#13078](https://github.com/googleapis/google-cloud-python/issues/13078)) ([ae58345](https://github.com/googleapis/google-cloud-python/commit/ae583456fd0a2f2e46ce8759918ebe3fc19a0fe8)) + +## Changelog diff --git a/packages/google-maps-areainsights/google/maps/areainsights/gapic_version.py b/packages/google-maps-areainsights/google/maps/areainsights/gapic_version.py index caeec5b9e887..20d1d778beb7 100644 --- 
a/packages/google-maps-areainsights/google/maps/areainsights/gapic_version.py +++ b/packages/google-maps-areainsights/google/maps/areainsights/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_version.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_version.py index caeec5b9e887..20d1d778beb7 100644 --- a/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_version.py +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.0" # {x-release-please-version} From 2402404a5ac48c8289a2dbc24fcc85a1eebe4224 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 13 Sep 2024 17:53:35 -0700 Subject: [PATCH 14/59] feat: [google-cloud-dataproc] add resource reference for KMS keys and fix comments (#13072) - [ ] Regenerate this pull request now. 
BEGIN_COMMIT_OVERRIDE feat: [google-cloud-dataproc] add resource reference for KMS keys and fix comments feat: [google-cloud-dataproc] Allow flink and trino job support for workflow templates API feat: [google-cloud-dataproc] Add unreachable output field for LIST workflow template API feat: [google-cloud-dataproc] Add unreachable output field for LIST batch templates API feat: [google-cloud-dataproc] Add kms key input for create cluster API feat: [google-cloud-dataproc] Add FLINK metric source for Dataproc Metric Source feat: [google-cloud-dataproc] Allow flink job support for jobs feat: [google-cloud-dataproc] Add unreachable output field for LIST jobs API END_COMMIT_OVERRIDE PiperOrigin-RevId: 674408200 Source-Link: https://github.com/googleapis/googleapis/commit/964f6c9ab536dd33a99bbba8d9d8071d0cf39f89 Source-Link: https://github.com/googleapis/googleapis-gen/commit/2979ec5cbcf2abae857f9a4cbe5cca09f040cb58 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFwcm9jLy5Pd2xCb3QueWFtbCIsImgiOiIyOTc5ZWM1Y2JjZjJhYmFlODU3ZjlhNGNiZTVjY2EwOWYwNDBjYjU4In0= BEGIN_NESTED_COMMIT feat: [google-cloud-dataproc] add support for new Dataproc features 1. Allow flink and trino job support for workflow templates API 2. Add unreachable output field for LIST workflow template API 4. Add unreachable output field for LIST batch templates API 5. Add kms key input for create cluster API 6. Add FLINK metric source for Dataproc Metric Source PiperOrigin-RevId: 673000575 Source-Link: https://github.com/googleapis/googleapis/commit/02f62c8e241a9f95f0d183785354e90e35388cbd Source-Link: https://github.com/googleapis/googleapis-gen/commit/7726f478ac19d91e914ed3ae546cf24453d000b5 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFwcm9jLy5Pd2xCb3QueWFtbCIsImgiOiI3NzI2ZjQ3OGFjMTlkOTFlOTE0ZWQzYWU1NDZjZjI0NDUzZDAwMGI1In0= END_NESTED_COMMIT BEGIN_NESTED_COMMIT feat: [google-cloud-dataproc] add support for new Dataproc features 1. Allow flink job support for jobs 2. 
Add unreachable output field for LIST jobs API PiperOrigin-RevId: 672705294 Source-Link: https://github.com/googleapis/googleapis/commit/32bc03653260356351854429bd7e2dfbf670d352 Source-Link: https://github.com/googleapis/googleapis-gen/commit/46e7728c9908d9793ebce1061b0d1c6c4bad925b Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFwcm9jLy5Pd2xCb3QueWFtbCIsImgiOiI0NmU3NzI4Yzk5MDhkOTc5M2ViY2UxMDYxYjBkMWM2YzRiYWQ5MjViIn0= END_NESTED_COMMIT --------- Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky --- .../google/cloud/dataproc/__init__.py | 2 + .../google/cloud/dataproc/gapic_version.py | 2 +- .../google/cloud/dataproc_v1/__init__.py | 2 + .../google/cloud/dataproc_v1/gapic_version.py | 2 +- .../cluster_controller/async_client.py | 11 +- .../services/cluster_controller/client.py | 33 +++- .../workflow_template_service/async_client.py | 4 + .../workflow_template_service/client.py | 24 +++ .../cloud/dataproc_v1/types/__init__.py | 2 + .../google/cloud/dataproc_v1/types/batches.py | 9 + .../cloud/dataproc_v1/types/clusters.py | 108 ++++++++--- .../google/cloud/dataproc_v1/types/jobs.py | 164 +++++++++++++---- .../google/cloud/dataproc_v1/types/shared.py | 9 +- .../dataproc_v1/types/workflow_templates.py | 78 ++++++++ ...pet_metadata_google.cloud.dataproc.v1.json | 2 +- .../dataproc_v1/test_batch_controller.py | 7 + .../dataproc_v1/test_cluster_controller.py | 93 +++++++--- .../gapic/dataproc_v1/test_job_controller.py | 16 ++ .../test_workflow_template_service.py | 168 ++++++++++++++---- 19 files changed, 598 insertions(+), 138 deletions(-) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py index 8c0be718b5bc..1c45dca78fda 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py @@ -135,6 +135,7 @@ CancelJobRequest, DeleteJobRequest, DriverSchedulingConfig, + FlinkJob, 
GetJobRequest, HadoopJob, HiveJob, @@ -315,6 +316,7 @@ "CancelJobRequest", "DeleteJobRequest", "DriverSchedulingConfig", + "FlinkJob", "GetJobRequest", "HadoopJob", "HiveJob", diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py index 0f412e925d59..558c8aab67c5 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "5.11.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py index 49bcea5780a2..e89772784679 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py @@ -115,6 +115,7 @@ CancelJobRequest, DeleteJobRequest, DriverSchedulingConfig, + FlinkJob, GetJobRequest, HadoopJob, HiveJob, @@ -271,6 +272,7 @@ "EnvironmentConfig", "ExecutionConfig", "FailureAction", + "FlinkJob", "GceClusterConfig", "GetAutoscalingPolicyRequest", "GetBatchRequest", diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py index 0f412e925d59..558c8aab67c5 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "5.11.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py index b3c00033b6a1..72ad480491a2 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py @@ -74,6 +74,8 @@ class ClusterControllerAsyncClient: cluster_path = staticmethod(ClusterControllerClient.cluster_path) parse_cluster_path = staticmethod(ClusterControllerClient.parse_cluster_path) + crypto_key_path = staticmethod(ClusterControllerClient.crypto_key_path) + parse_crypto_key_path = staticmethod(ClusterControllerClient.parse_crypto_key_path) node_group_path = staticmethod(ClusterControllerClient.node_group_path) parse_node_group_path = staticmethod(ClusterControllerClient.parse_node_group_path) service_path = staticmethod(ClusterControllerClient.service_path) @@ -1236,10 +1238,11 @@ async def sample_list_clusters(): label key. **value** can be ``*`` to match all values. ``status.state`` can be one of the following: ``ACTIVE``, ``INACTIVE``, ``CREATING``, ``RUNNING``, - ``ERROR``, ``DELETING``, or ``UPDATING``. ``ACTIVE`` - contains the ``CREATING``, ``UPDATING``, and ``RUNNING`` - states. ``INACTIVE`` contains the ``DELETING`` and - ``ERROR`` states. ``clusterName`` is the name of the + ``ERROR``, ``DELETING``, ``UPDATING``, ``STOPPING``, or + ``STOPPED``. ``ACTIVE`` contains the ``CREATING``, + ``UPDATING``, and ``RUNNING`` states. ``INACTIVE`` + contains the ``DELETING``, ``ERROR``, ``STOPPING``, and + ``STOPPED`` states. ``clusterName`` is the name of the cluster provided at creation time. 
Only the logical ``AND`` operator is supported; space-separated items are treated as having an implicit ``AND`` operator. diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py index d46589e86a36..d0662bc0348c 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py @@ -215,6 +215,30 @@ def parse_cluster_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def crypto_key_path( + project: str, + location: str, + key_ring: str, + crypto_key: str, + ) -> str: + """Returns a fully-qualified crypto_key string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + + @staticmethod + def parse_crypto_key_path(path: str) -> Dict[str, str]: + """Parses a crypto_key path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/keyRings/(?P<key_ring>.+?)/cryptoKeys/(?P<crypto_key>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def node_group_path( project: str, @@ -1686,10 +1710,11 @@ def sample_list_clusters(): label key. **value** can be ``*`` to match all values. ``status.state`` can be one of the following: ``ACTIVE``, ``INACTIVE``, ``CREATING``, ``RUNNING``, - ``ERROR``, ``DELETING``, or ``UPDATING``. ``ACTIVE`` - contains the ``CREATING``, ``UPDATING``, and ``RUNNING`` - states. ``INACTIVE`` contains the ``DELETING`` and - ``ERROR`` states. ``clusterName`` is the name of the + ``ERROR``, ``DELETING``, ``UPDATING``, ``STOPPING``, or + ``STOPPED``. ``ACTIVE`` contains the ``CREATING``, + ``UPDATING``, and ``RUNNING`` states. 
``INACTIVE`` + contains the ``DELETING``, ``ERROR``, ``STOPPING``, and + ``STOPPED`` states. ``clusterName`` is the name of the cluster provided at creation time. Only the logical ``AND`` operator is supported; space-separated items are treated as having an implicit ``AND`` operator. diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py index 85222311c4d7..90bd5f500cbc 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py @@ -75,6 +75,10 @@ class WorkflowTemplateServiceAsyncClient: ) _DEFAULT_UNIVERSE = WorkflowTemplateServiceClient._DEFAULT_UNIVERSE + crypto_key_path = staticmethod(WorkflowTemplateServiceClient.crypto_key_path) + parse_crypto_key_path = staticmethod( + WorkflowTemplateServiceClient.parse_crypto_key_path + ) node_group_path = staticmethod(WorkflowTemplateServiceClient.node_group_path) parse_node_group_path = staticmethod( WorkflowTemplateServiceClient.parse_node_group_path diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py index 4da14dd2c32c..175bc494f421 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py @@ -194,6 +194,30 @@ def transport(self) -> WorkflowTemplateServiceTransport: """ return self._transport + @staticmethod + def crypto_key_path( + project: str, + location: str, + key_ring: str, + crypto_key: str, + ) -> str: + """Returns a fully-qualified crypto_key 
string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + + @staticmethod + def parse_crypto_key_path(path: str) -> Dict[str, str]: + """Parses a crypto_key path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/keyRings/(?P<key_ring>.+?)/cryptoKeys/(?P<crypto_key>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def node_group_path( project: str, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py index 535fc0e4fc92..2bf4fcd11209 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py @@ -84,6 +84,7 @@ CancelJobRequest, DeleteJobRequest, DriverSchedulingConfig, + FlinkJob, GetJobRequest, HadoopJob, HiveJob, @@ -248,6 +249,7 @@ "CancelJobRequest", "DeleteJobRequest", "DriverSchedulingConfig", + "FlinkJob", "GetJobRequest", "HadoopJob", "HiveJob", diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/batches.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/batches.py index bff597bc91b6..2459180957df 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/batches.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/batches.py @@ -183,6 +183,11 @@ class ListBatchesResponse(proto.Message): A token, which can be sent as ``page_token`` to retrieve the next page. If this field is omitted, there are no subsequent pages. + unreachable (MutableSequence[str]): + Output only. List of Batches that could not + be included in the response. Attempting to get + one of these resources may indicate why it was + not included in the list response. 
""" @property @@ -198,6 +203,10 @@ def raw_page(self): proto.STRING, number=2, ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class DeleteBatchRequest(proto.Message): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py index b6e60e1765c0..169d350f046b 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py @@ -464,15 +464,50 @@ class EncryptionConfig(proto.Message): Attributes: gce_pd_kms_key_name (str): - Optional. The Cloud KMS key name to use for - PD disk encryption for all instances in the - cluster. + Optional. The Cloud KMS key resource name to use for + persistent disk encryption for all instances in the cluster. + See [Use CMEK with cluster data] + (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/customer-managed-encryption#use_cmek_with_cluster_data) + for more information. + kms_key (str): + Optional. The Cloud KMS key resource name to use for cluster + persistent disk and job argument encryption. See [Use CMEK + with cluster data] + (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/customer-managed-encryption#use_cmek_with_cluster_data) + for more information. 
+ + When this key resource name is provided, the following job + arguments of the following job types submitted to the + cluster are encrypted using CMEK: + + - `FlinkJob + args `__ + - `HadoopJob + args `__ + - `SparkJob + args `__ + - `SparkRJob + args `__ + - `PySparkJob + args `__ + - `SparkSqlJob `__ + scriptVariables and queryList.queries + - `HiveJob `__ + scriptVariables and queryList.queries + - `PigJob `__ + scriptVariables and queryList.queries + - `PrestoJob `__ + scriptVariables and queryList.queries """ gce_pd_kms_key_name: str = proto.Field( proto.STRING, number=1, ) + kms_key: str = proto.Field( + proto.STRING, + number=2, + ) class GceClusterConfig(proto.Message): @@ -519,14 +554,25 @@ class GceClusterConfig(proto.Message): - ``projects/[project_id]/regions/[region]/subnetworks/sub0`` - ``sub0`` internal_ip_only (bool): - Optional. If true, all instances in the cluster will only - have internal IP addresses. By default, clusters are not - restricted to internal IP addresses, and will have ephemeral - external IP addresses assigned to each instance. This - ``internal_ip_only`` restriction can only be enabled for - subnetwork enabled networks, and all off-cluster - dependencies must be configured to be accessible without - external IP addresses. + Optional. This setting applies to subnetwork-enabled + networks. It is set to ``true`` by default in clusters + created with image versions 2.2.x. + + When set to ``true``: + + - All cluster VMs have internal IP addresses. + - [Google Private Access] + (https://cloud.google.com/vpc/docs/private-google-access) + must be enabled to access Dataproc and other Google Cloud + APIs. + - Off-cluster dependencies must be configured to be + accessible without external IP addresses. + + When set to ``false``: + + - Cluster VMs are not restricted to internal IP addresses. + - Ephemeral external IP addresses are assigned to each + cluster VM. This field is a member of `oneof`_ ``_internal_ip_only``. 
private_ipv6_google_access (google.cloud.dataproc_v1.types.GceClusterConfig.PrivateIpv6GoogleAccess): @@ -560,9 +606,9 @@ class GceClusterConfig(proto.Message): - https://www.googleapis.com/auth/bigtable.data - https://www.googleapis.com/auth/devstorage.full_control tags (MutableSequence[str]): - The Compute Engine tags to add to all instances (see + The Compute Engine network tags to add to all instances (see `Tagging - instances `__). + instances `__). metadata (MutableMapping[str, str]): Optional. The Compute Engine metadata entries to add to all instances (see `Project and instance @@ -1156,15 +1202,15 @@ class AcceleratorConfig(proto.Message): Examples: - - ``https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]/acceleratorTypes/nvidia-tesla-k80`` - - ``projects/[project_id]/zones/[zone]/acceleratorTypes/nvidia-tesla-k80`` - - ``nvidia-tesla-k80`` + - ``https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]/acceleratorTypes/nvidia-tesla-t4`` + - ``projects/[project_id]/zones/[zone]/acceleratorTypes/nvidia-tesla-t4`` + - ``nvidia-tesla-t4`` **Auto Zone Exception**: If you are using the Dataproc `Auto Zone Placement `__ feature, you must use the short name of the accelerator type - resource, for example, ``nvidia-tesla-k80``. + resource, for example, ``nvidia-tesla-t4``. accelerator_count (int): The number of the accelerator cards of this type exposed to this instance. @@ -1501,8 +1547,8 @@ class KerberosConfig(proto.Message): encrypted file containing the root principal password. kms_key_uri (str): - Optional. The uri of the KMS key used to - encrypt various sensitive files. + Optional. The URI of the KMS key used to + encrypt sensitive files. keystore_uri (str): Optional. The Cloud Storage URI of the keystore file used for SSL encryption. If not @@ -1649,7 +1695,7 @@ class SoftwareConfig(proto.Message): image_version (str): Optional. The version of software inside the cluster. 
It must be one of the supported `Dataproc - Versions `__, + Versions `__, such as "1.2" (including a subminor version, such as "1.2.29"), or the `"preview" version `__. @@ -1834,6 +1880,8 @@ class MetricSource(proto.Enum): Hiveserver2 metric source. HIVEMETASTORE (7): hivemetastore metric source + FLINK (8): + flink metric source """ METRIC_SOURCE_UNSPECIFIED = 0 MONITORING_AGENT_DEFAULTS = 1 @@ -1843,6 +1891,7 @@ class MetricSource(proto.Enum): SPARK_HISTORY_SERVER = 5 HIVESERVER2 = 6 HIVEMETASTORE = 7 + FLINK = 8 class Metric(proto.Message): r"""A Dataproc custom metric. @@ -2312,11 +2361,12 @@ class ListClustersRequest(proto.Message): or ``labels.[KEY]``, and ``[KEY]`` is a label key. **value** can be ``*`` to match all values. ``status.state`` can be one of the following: ``ACTIVE``, ``INACTIVE``, - ``CREATING``, ``RUNNING``, ``ERROR``, ``DELETING``, or - ``UPDATING``. ``ACTIVE`` contains the ``CREATING``, - ``UPDATING``, and ``RUNNING`` states. ``INACTIVE`` contains - the ``DELETING`` and ``ERROR`` states. ``clusterName`` is - the name of the cluster provided at creation time. Only the + ``CREATING``, ``RUNNING``, ``ERROR``, ``DELETING``, + ``UPDATING``, ``STOPPING``, or ``STOPPED``. ``ACTIVE`` + contains the ``CREATING``, ``UPDATING``, and ``RUNNING`` + states. ``INACTIVE`` contains the ``DELETING``, ``ERROR``, + ``STOPPING``, and ``STOPPED`` states. ``clusterName`` is the + name of the cluster provided at creation time. Only the logical ``AND`` operator is supported; space-separated items are treated as having an implicit ``AND`` operator. @@ -2393,10 +2443,10 @@ class DiagnoseClusterRequest(proto.Message): cluster_name (str): Required. The cluster name. tarball_gcs_dir (str): - Optional. The output Cloud Storage directory - for the diagnostic tarball. If not specified, a - task-specific directory in the cluster's staging - bucket will be used. + Optional. (Optional) The output Cloud Storage + directory for the diagnostic tarball. 
If not + specified, a task-specific directory in the + cluster's staging bucket will be used. tarball_access (google.cloud.dataproc_v1.types.DiagnoseClusterRequest.TarballAccess): Optional. (Optional) The access type to the diagnostic tarball. If not specified, falls back diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/jobs.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/jobs.py index b0e094f18985..2f9bcc9dba29 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/jobs.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/jobs.py @@ -35,6 +35,7 @@ "SparkRJob", "PrestoJob", "TrinoJob", + "FlinkJob", "JobPlacement", "JobStatus", "JobReference", @@ -60,7 +61,7 @@ class LoggingConfig(proto.Message): Attributes: driver_log_levels (MutableMapping[str, google.cloud.dataproc_v1.types.LoggingConfig.Level]): The per-package log levels for the driver. - This may include "root" package name to + This can include "root" package name to configure rootLogger. Examples: - 'com.google = FATAL' @@ -144,7 +145,7 @@ class HadoopJob(proto.Message): args (MutableSequence[str]): Optional. The arguments to pass to the driver. Do not include arguments, such as ``-libjars`` or ``-Dfoo=bar``, - that can be set as job properties, since a collision may + that can be set as job properties, since a collision might occur that causes an incorrect job submission. jar_file_uris (MutableSequence[str]): Optional. Jar file URIs to add to the @@ -163,7 +164,7 @@ class HadoopJob(proto.Message): properties (MutableMapping[str, str]): Optional. A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set - by the Dataproc API may be overwritten. Can include + by the Dataproc API might be overwritten. Can include properties set in ``/etc/hadoop/conf/*-site`` and classes in user code. 
logging_config (google.cloud.dataproc_v1.types.LoggingConfig): @@ -229,7 +230,7 @@ class SparkJob(proto.Message): main_class (str): The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or - specified in ``jar_file_uris``. + specified in SparkJob.jar_file_uris. This field is a member of `oneof`_ ``driver``. args (MutableSequence[str]): @@ -253,8 +254,9 @@ class SparkJob(proto.Message): properties (MutableMapping[str, str]): Optional. A mapping of property names to values, used to configure Spark. Properties that - conflict with values set by the Dataproc API may - be overwritten. Can include properties set in + conflict with values set by the Dataproc API + might be overwritten. Can include properties set + in /etc/spark/conf/spark-defaults.conf and classes in user code. logging_config (google.cloud.dataproc_v1.types.LoggingConfig): @@ -335,7 +337,7 @@ class PySparkJob(proto.Message): Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc - API may be overwritten. Can include properties + API might be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code. @@ -441,8 +443,8 @@ class HiveJob(proto.Message): properties (MutableMapping[str, str]): Optional. A mapping of property names and values, used to configure Hive. Properties that conflict with values set by - the Dataproc API may be overwritten. Can include properties - set in ``/etc/hadoop/conf/*-site.xml``, + the Dataproc API might be overwritten. Can include + properties set in ``/etc/hadoop/conf/*-site.xml``, /etc/hive/conf/hive-site.xml, and classes in user code. jar_file_uris (MutableSequence[str]): Optional. HCFS URIs of jar files to add to @@ -511,7 +513,7 @@ class SparkSqlJob(proto.Message): Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. 
Properties that conflict with values set by the - Dataproc API may be overwritten. + Dataproc API might be overwritten. jar_file_uris (MutableSequence[str]): Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. @@ -583,8 +585,8 @@ class PigJob(proto.Message): properties (MutableMapping[str, str]): Optional. A mapping of property names to values, used to configure Pig. Properties that conflict with values set by - the Dataproc API may be overwritten. Can include properties - set in ``/etc/hadoop/conf/*-site.xml``, + the Dataproc API might be overwritten. Can include + properties set in ``/etc/hadoop/conf/*-site.xml``, /etc/pig/conf/pig.properties, and classes in user code. jar_file_uris (MutableSequence[str]): Optional. HCFS URIs of jar files to add to @@ -659,7 +661,7 @@ class SparkRJob(proto.Message): Optional. A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc - API may be overwritten. Can include properties + API might be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code. @@ -856,6 +858,86 @@ class TrinoJob(proto.Message): ) +class FlinkJob(proto.Message): + r"""A Dataproc job for running Apache Flink applications on YARN. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + main_jar_file_uri (str): + The HCFS URI of the jar file that contains + the main class. + + This field is a member of `oneof`_ ``driver``. + main_class (str): + The name of the driver's main class. The jar file that + contains the class must be in the default CLASSPATH or + specified in + [jarFileUris][google.cloud.dataproc.v1.FlinkJob.jar_file_uris]. 
+ + This field is a member of `oneof`_ ``driver``. + args (MutableSequence[str]): + Optional. The arguments to pass to the driver. Do not + include arguments, such as ``--conf``, that can be set as + job properties, since a collision might occur that causes an + incorrect job submission. + jar_file_uris (MutableSequence[str]): + Optional. HCFS URIs of jar files to add to + the CLASSPATHs of the Flink driver and tasks. + savepoint_uri (str): + Optional. HCFS URI of the savepoint, which + contains the last saved progress for starting + the current job. + properties (MutableMapping[str, str]): + Optional. A mapping of property names to values, used to + configure Flink. Properties that conflict with values set by + the Dataproc API might be overwritten. Can include + properties set in ``/etc/flink/conf/flink-defaults.conf`` + and classes in user code. + logging_config (google.cloud.dataproc_v1.types.LoggingConfig): + Optional. The runtime log config for job + execution. + """ + + main_jar_file_uri: str = proto.Field( + proto.STRING, + number=1, + oneof="driver", + ) + main_class: str = proto.Field( + proto.STRING, + number=2, + oneof="driver", + ) + args: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + jar_file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + savepoint_uri: str = proto.Field( + proto.STRING, + number=9, + ) + properties: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + logging_config: "LoggingConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="LoggingConfig", + ) + + class JobPlacement(proto.Message): r"""Dataproc job config. @@ -894,9 +976,8 @@ class JobStatus(proto.Message): Output only. A state message specifying the overall job state. details (str): - Optional. Output only. Job state details, - such as an error description if the state is - ERROR. + Optional. Output only. 
Job state details, such as an error + description if the state is ``ERROR``. state_start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when this state was entered. @@ -963,14 +1044,14 @@ class Substate(proto.Enum): Applies to RUNNING state. QUEUED (2): The Job has been received and is awaiting - execution (it may be waiting for a condition to - be met). See the "details" field for the reason - for the delay. + execution (it might be waiting for a condition + to be met). See the "details" field for the + reason for the delay. Applies to RUNNING state. STALE_STATUS (3): The agent-reported status is out of date, - which may be caused by a loss of communication + which can be caused by a loss of communication between the agent and Dataproc. If the agent does not send a timely update, the job will fail. @@ -1165,10 +1246,14 @@ class Job(proto.Message): trino_job (google.cloud.dataproc_v1.types.TrinoJob): Optional. Job is a Trino job. + This field is a member of `oneof`_ ``type_job``. + flink_job (google.cloud.dataproc_v1.types.FlinkJob): + Optional. Job is a Flink job. + This field is a member of `oneof`_ ``type_job``. status (google.cloud.dataproc_v1.types.JobStatus): Output only. The job status. Additional application-specific - status information may be contained in the type_job and + status information might be contained in the type_job and yarn_applications fields. status_history (MutableSequence[google.cloud.dataproc_v1.types.JobStatus]): Output only. The previous job status. @@ -1177,20 +1262,20 @@ class Job(proto.Message): this job. **Beta** Feature: This report is available for testing - purposes only. It may be changed before final release. + purposes only. It might be changed before final release. driver_output_resource_uri (str): Output only. A URI pointing to the location of the stdout of the job's driver program. driver_control_files_uri (str): Output only. 
If present, the location of miscellaneous - control files which may be used as part of job setup and - handling. If not present, control files may be placed in the - same location as ``driver_output_uri``. + control files which can be used as part of job setup and + handling. If not present, control files might be placed in + the same location as ``driver_output_uri``. labels (MutableMapping[str, str]): Optional. The labels to associate with this job. Label **keys** must contain 1 to 63 characters, and must conform to `RFC 1035 `__. - Label **values** may be empty, but, if present, must contain + Label **values** can be empty, but, if present, must contain 1 to 63 characters, and must conform to `RFC 1035 `__. No more than 32 labels can be associated with a job. @@ -1199,7 +1284,8 @@ class Job(proto.Message): job_uuid (str): Output only. A UUID that uniquely identifies a job within the project over time. This is in contrast to a - user-settable reference.job_id that may be reused over time. + user-settable reference.job_id that might be reused over + time. done (bool): Output only. Indicates whether the job is completed. If the value is ``false``, the job is still in progress. If @@ -1273,6 +1359,12 @@ class Job(proto.Message): oneof="type_job", message="TrinoJob", ) + flink_job: "FlinkJob" = proto.Field( + proto.MESSAGE, + number=29, + oneof="type_job", + message="FlinkJob", + ) status: "JobStatus" = proto.Field( proto.MESSAGE, number=8, @@ -1348,12 +1440,12 @@ class JobScheduling(proto.Message): Attributes: max_failures_per_hour (int): - Optional. Maximum number of times per hour a driver may be + Optional. Maximum number of times per hour a driver can be restarted as a result of driver exiting with non-zero code before job is reported failed. - A job may be reported as thrashing if the driver exits with - a non-zero code four times within a 10-minute window. 
+ A job might be reported as thrashing if the driver exits + with a non-zero code four times within a 10-minute window. Maximum value is 10. @@ -1361,7 +1453,7 @@ class JobScheduling(proto.Message): Dataproc [workflow templates] (https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#adding_jobs_to_a_template). max_failures_total (int): - Optional. Maximum total number of times a driver may be + Optional. Maximum total number of times a driver can be restarted as a result of the driver exiting with a non-zero code. After the maximum number is reached, the job will be reported as failed. @@ -1644,6 +1736,12 @@ class ListJobsResponse(proto.Message): are more results to fetch. To fetch additional results, provide this value as the ``page_token`` in a subsequent ListJobsRequest. + unreachable (MutableSequence[str]): + Output only. List of jobs with + [kms_key][google.cloud.dataproc.v1.EncryptionConfig.kms_key]-encrypted + parameters that could not be decrypted. A response to a + ``jobs.get`` request may indicate the reason for the + decryption failure for a specific job. """ @property @@ -1659,6 +1757,10 @@ def raw_page(self): proto.STRING, number=2, ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class CancelJobRequest(proto.Message): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py index ed37c1b8a565..656453c5fe33 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py @@ -54,10 +54,11 @@ class Component(proto.Enum): Unspecified component. Specifying this will cause Cluster creation to fail. ANACONDA (5): - The Anaconda python distribution. 
The Anaconda component is - not supported in the Dataproc [2.0 image] - (/https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-release-2.0). - The 2.0 image is pre-installed with Miniconda. + The Anaconda component is no longer supported or applicable + to [supported Dataproc on Compute Engine image versions] + (https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-version-clusters#supported-dataproc-image-versions). + It cannot be activated on clusters created with supported + Dataproc on Compute Engine image versions. DOCKER (13): Docker DRUID (9): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/workflow_templates.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/workflow_templates.py index 3526627e14b0..5022f3e353a7 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/workflow_templates.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/workflow_templates.py @@ -124,8 +124,51 @@ class WorkflowTemplate(proto.Message): `managed cluster `__, the cluster is deleted. + encryption_config (google.cloud.dataproc_v1.types.WorkflowTemplate.EncryptionConfig): + Optional. Encryption settings for encrypting + workflow template job arguments. """ + class EncryptionConfig(proto.Message): + r"""Encryption settings for encrypting workflow template job + arguments. + + Attributes: + kms_key (str): + Optional. The Cloud KMS key name to use for encrypting + workflow template job arguments. 
+ + When this key is provided, the following workflow + template [job arguments] + (https://cloud.google.com/dataproc/docs/concepts/workflows/use-workflows#adding_jobs_to_a_template), + if present, are `CMEK + encrypted `__: + + - `FlinkJob + args `__ + - `HadoopJob + args `__ + - `SparkJob + args `__ + - `SparkRJob + args `__ + - `PySparkJob + args `__ + - `SparkSqlJob `__ + scriptVariables and queryList.queries + - `HiveJob `__ + scriptVariables and queryList.queries + - `PigJob `__ + scriptVariables and queryList.queries + - `PrestoJob `__ + scriptVariables and queryList.queries + """ + + kms_key: str = proto.Field( + proto.STRING, + number=1, + ) + id: str = proto.Field( proto.STRING, number=2, @@ -173,6 +216,11 @@ class WorkflowTemplate(proto.Message): number=10, message=duration_pb2.Duration, ) + encryption_config: EncryptionConfig = proto.Field( + proto.MESSAGE, + number=11, + message=EncryptionConfig, + ) class WorkflowTemplatePlacement(proto.Message): @@ -346,6 +394,14 @@ class OrderedJob(proto.Message): presto_job (google.cloud.dataproc_v1.types.PrestoJob): Optional. Job is a Presto job. + This field is a member of `oneof`_ ``job_type``. + trino_job (google.cloud.dataproc_v1.types.TrinoJob): + Optional. Job is a Trino job. + + This field is a member of `oneof`_ ``job_type``. + flink_job (google.cloud.dataproc_v1.types.FlinkJob): + Optional. Job is a Flink job. + This field is a member of `oneof`_ ``job_type``. labels (MutableMapping[str, str]): Optional. The labels to associate with this job. 
@@ -419,6 +475,18 @@ class OrderedJob(proto.Message): oneof="job_type", message=gcd_jobs.PrestoJob, ) + trino_job: gcd_jobs.TrinoJob = proto.Field( + proto.MESSAGE, + number=13, + oneof="job_type", + message=gcd_jobs.TrinoJob, + ) + flink_job: gcd_jobs.FlinkJob = proto.Field( + proto.MESSAGE, + number=14, + oneof="job_type", + message=gcd_jobs.FlinkJob, + ) labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, @@ -1095,6 +1163,12 @@ class ListWorkflowTemplatesResponse(proto.Message): are more results to fetch. To fetch additional results, provide this value as the page_token in a subsequent ListWorkflowTemplatesRequest. + unreachable (MutableSequence[str]): + Output only. List of workflow templates that + could not be included in the response. + Attempting to get one of these resources may + indicate why it was not included in the list + response. """ @property @@ -1110,6 +1184,10 @@ def raw_page(self): proto.STRING, number=2, ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class DeleteWorkflowTemplateRequest(proto.Message): diff --git a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json index c1b4b338fe39..c5f4e003db04 100644 --- a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json +++ b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc", - "version": "5.11.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py index 4e92cbcfc4a1..b1b1cf5bcb8b 100644 --- 
a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py @@ -1953,6 +1953,7 @@ def test_list_batches(request_type, transport: str = "grpc"): # Designate an appropriate return value for the call. call.return_value = batches.ListBatchesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_batches(request) @@ -1965,6 +1966,7 @@ def test_list_batches(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBatchesPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_batches_empty_call(): @@ -2070,6 +2072,7 @@ async def test_list_batches_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( batches.ListBatchesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_batches() @@ -2139,6 +2142,7 @@ async def test_list_batches_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( batches.ListBatchesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_batches(request) @@ -2152,6 +2156,7 @@ async def test_list_batches_async( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBatchesAsyncPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio @@ -3648,6 +3653,7 @@ def test_list_batches_rest(request_type): # Designate an appropriate value for the returned response. 
return_value = batches.ListBatchesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -3664,6 +3670,7 @@ def test_list_batches_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBatchesPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_batches_rest_use_cached_wrapped_rpc(): diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py index 099921eb7e2b..6910a6fc2fa4 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py @@ -4476,7 +4476,10 @@ def test_create_cluster_rest(request_type): "execution_timeout": {"seconds": 751, "nanos": 543}, } ], - "encryption_config": {"gce_pd_kms_key_name": "gce_pd_kms_key_name_value"}, + "encryption_config": { + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value", + "kms_key": "kms_key_value", + }, "autoscaling_config": {"policy_uri": "policy_uri_value"}, "security_config": { "kerberos_config": { @@ -5082,7 +5085,10 @@ def test_update_cluster_rest(request_type): "execution_timeout": {"seconds": 751, "nanos": 543}, } ], - "encryption_config": {"gce_pd_kms_key_name": "gce_pd_kms_key_name_value"}, + "encryption_config": { + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value", + "kms_key": "kms_key_value", + }, "autoscaling_config": {"policy_uri": "policy_uri_value"}, "security_config": { "kerberos_config": { @@ -8167,11 +8173,42 @@ def test_parse_cluster_path(): assert expected == actual -def test_node_group_path(): +def test_crypto_key_path(): project = "cuttlefish" - region = "mussel" - cluster = "winkle" - node_group = "nautilus" + location = 
"mussel" + key_ring = "winkle" + crypto_key = "nautilus" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + actual = ClusterControllerClient.crypto_key_path( + project, location, key_ring, crypto_key + ) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "scallop", + "location": "abalone", + "key_ring": "squid", + "crypto_key": "clam", + } + path = ClusterControllerClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. + actual = ClusterControllerClient.parse_crypto_key_path(path) + assert expected == actual + + +def test_node_group_path(): + project = "whelk" + region = "octopus" + cluster = "oyster" + node_group = "nudibranch" expected = "projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{node_group}".format( project=project, region=region, @@ -8186,10 +8223,10 @@ def test_node_group_path(): def test_parse_node_group_path(): expected = { - "project": "scallop", - "region": "abalone", - "cluster": "squid", - "node_group": "clam", + "project": "cuttlefish", + "region": "mussel", + "cluster": "winkle", + "node_group": "nautilus", } path = ClusterControllerClient.node_group_path(**expected) @@ -8199,9 +8236,9 @@ def test_parse_node_group_path(): def test_service_path(): - project = "whelk" - location = "octopus" - service = "oyster" + project = "scallop" + location = "abalone" + service = "squid" expected = "projects/{project}/locations/{location}/services/{service}".format( project=project, location=location, @@ -8213,9 +8250,9 @@ def test_service_path(): def test_parse_service_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "service": "mussel", + "project": "clam", + "location": "whelk", + "service": "octopus", } path = ClusterControllerClient.service_path(**expected) @@ -8225,7 +8262,7 @@ def 
test_parse_service_path(): def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -8235,7 +8272,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "nudibranch", } path = ClusterControllerClient.common_billing_account_path(**expected) @@ -8245,7 +8282,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "cuttlefish" expected = "folders/{folder}".format( folder=folder, ) @@ -8255,7 +8292,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "mussel", } path = ClusterControllerClient.common_folder_path(**expected) @@ -8265,7 +8302,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "winkle" expected = "organizations/{organization}".format( organization=organization, ) @@ -8275,7 +8312,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "nautilus", } path = ClusterControllerClient.common_organization_path(**expected) @@ -8285,7 +8322,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "scallop" expected = "projects/{project}".format( project=project, ) @@ -8295,7 +8332,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "abalone", } path = ClusterControllerClient.common_project_path(**expected) @@ -8305,8 +8342,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = "squid" + location = "clam" expected = 
"projects/{project}/locations/{location}".format( project=project, location=location, @@ -8317,8 +8354,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "whelk", + "location": "octopus", } path = ClusterControllerClient.common_location_path(**expected) diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py index 1d12641b7a95..8b8450a8006f 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py @@ -2368,6 +2368,7 @@ def test_list_jobs(request_type, transport: str = "grpc"): # Designate an appropriate return value for the call. call.return_value = jobs.ListJobsResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_jobs(request) @@ -2380,6 +2381,7 @@ def test_list_jobs(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListJobsPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_jobs_empty_call(): @@ -2487,6 +2489,7 @@ async def test_list_jobs_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( jobs.ListJobsResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_jobs() @@ -2554,6 +2557,7 @@ async def test_list_jobs_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( jobs.ListJobsResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_jobs(request) @@ -2567,6 +2571,7 @@ async def test_list_jobs_async( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListJobsAsyncPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio @@ -5008,6 +5013,7 @@ def test_list_jobs_rest(request_type): # Designate an appropriate value for the returned response. return_value = jobs.ListJobsResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -5024,6 +5030,7 @@ def test_list_jobs_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListJobsPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_jobs_rest_use_cached_wrapped_rpc(): @@ -5477,6 +5484,15 @@ def test_update_job_rest(request_type): "properties": {}, "logging_config": {}, }, + "flink_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "savepoint_uri": "savepoint_uri_value", + "properties": {}, + "logging_config": {}, + }, "status": { "state": 1, "details": "details_value", diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py index 67bc73b8e99b..c996bc73de7b 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py @@ -3238,6 +3238,7 @@ def test_list_workflow_templates(request_type, transport: str = "grpc"): # Designate an appropriate return value for the call. call.return_value = workflow_templates.ListWorkflowTemplatesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_workflow_templates(request) @@ -3250,6 +3251,7 @@ def test_list_workflow_templates(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListWorkflowTemplatesPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_workflow_templates_empty_call(): @@ -3362,6 +3364,7 @@ async def test_list_workflow_templates_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( workflow_templates.ListWorkflowTemplatesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_workflow_templates() @@ -3434,6 +3437,7 @@ async def test_list_workflow_templates_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( workflow_templates.ListWorkflowTemplatesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_workflow_templates(request) @@ -3447,6 +3451,7 @@ async def test_list_workflow_templates_async( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListWorkflowTemplatesAsyncPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio @@ -4307,7 +4312,8 @@ def test_create_workflow_template_rest(request_type): } ], "encryption_config": { - "gce_pd_kms_key_name": "gce_pd_kms_key_name_value" + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value", + "kms_key": "kms_key_value", }, "autoscaling_config": {"policy_uri": "policy_uri_value"}, "security_config": { @@ -4448,6 +4454,24 @@ def test_create_workflow_template_rest(request_type): "properties": {}, "logging_config": {}, }, + "trino_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "output_format": "output_format_value", + "client_tags": ["client_tags_value1", "client_tags_value2"], + "properties": {}, + "logging_config": {}, + }, + "flink_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": 
"main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "savepoint_uri": "savepoint_uri_value", + "properties": {}, + "logging_config": {}, + }, "labels": {}, "scheduling": { "max_failures_per_hour": 2243, @@ -4471,6 +4495,7 @@ def test_create_workflow_template_rest(request_type): } ], "dag_timeout": {}, + "encryption_config": {"kms_key": "kms_key_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -5615,7 +5640,8 @@ def test_instantiate_inline_workflow_template_rest(request_type): } ], "encryption_config": { - "gce_pd_kms_key_name": "gce_pd_kms_key_name_value" + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value", + "kms_key": "kms_key_value", }, "autoscaling_config": {"policy_uri": "policy_uri_value"}, "security_config": { @@ -5756,6 +5782,24 @@ def test_instantiate_inline_workflow_template_rest(request_type): "properties": {}, "logging_config": {}, }, + "trino_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "output_format": "output_format_value", + "client_tags": ["client_tags_value1", "client_tags_value2"], + "properties": {}, + "logging_config": {}, + }, + "flink_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "savepoint_uri": "savepoint_uri_value", + "properties": {}, + "logging_config": {}, + }, "labels": {}, "scheduling": { "max_failures_per_hour": 2243, @@ -5779,6 +5823,7 @@ def test_instantiate_inline_workflow_template_rest(request_type): } ], "dag_timeout": {}, + "encryption_config": {"kms_key": "kms_key_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -6289,7 +6334,8 @@ def test_update_workflow_template_rest(request_type): } ], "encryption_config": { - "gce_pd_kms_key_name": "gce_pd_kms_key_name_value" + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value", + "kms_key": "kms_key_value", }, "autoscaling_config": {"policy_uri": "policy_uri_value"}, "security_config": { @@ -6430,6 +6476,24 @@ def test_update_workflow_template_rest(request_type): "properties": {}, "logging_config": {}, }, + "trino_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "output_format": "output_format_value", + "client_tags": ["client_tags_value1", "client_tags_value2"], + "properties": {}, + "logging_config": {}, + }, + "flink_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "savepoint_uri": "savepoint_uri_value", + "properties": {}, + "logging_config": {}, + }, "labels": {}, "scheduling": { "max_failures_per_hour": 2243, @@ -6453,6 +6517,7 @@ def test_update_workflow_template_rest(request_type): } ], "dag_timeout": {}, + "encryption_config": {"kms_key": "kms_key_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -6849,6 +6914,7 @@ def test_list_workflow_templates_rest(request_type): # Designate an appropriate value for the returned response. return_value = workflow_templates.ListWorkflowTemplatesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -6865,6 +6931,7 @@ def test_list_workflow_templates_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListWorkflowTemplatesPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_workflow_templates_rest_use_cached_wrapped_rpc(): @@ -8134,11 +8201,42 @@ def test_workflow_template_service_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client -def test_node_group_path(): +def test_crypto_key_path(): project = "squid" - region = "clam" - cluster = "whelk" - node_group = "octopus" + location = "clam" + key_ring = "whelk" + crypto_key = "octopus" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + actual = WorkflowTemplateServiceClient.crypto_key_path( + project, location, key_ring, crypto_key + ) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "key_ring": "cuttlefish", + "crypto_key": "mussel", + } + path = WorkflowTemplateServiceClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. 
+ actual = WorkflowTemplateServiceClient.parse_crypto_key_path(path) + assert expected == actual + + +def test_node_group_path(): + project = "winkle" + region = "nautilus" + cluster = "scallop" + node_group = "abalone" expected = "projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{node_group}".format( project=project, region=region, @@ -8153,10 +8251,10 @@ def test_node_group_path(): def test_parse_node_group_path(): expected = { - "project": "oyster", - "region": "nudibranch", - "cluster": "cuttlefish", - "node_group": "mussel", + "project": "squid", + "region": "clam", + "cluster": "whelk", + "node_group": "octopus", } path = WorkflowTemplateServiceClient.node_group_path(**expected) @@ -8166,9 +8264,9 @@ def test_parse_node_group_path(): def test_service_path(): - project = "winkle" - location = "nautilus" - service = "scallop" + project = "oyster" + location = "nudibranch" + service = "cuttlefish" expected = "projects/{project}/locations/{location}/services/{service}".format( project=project, location=location, @@ -8180,9 +8278,9 @@ def test_service_path(): def test_parse_service_path(): expected = { - "project": "abalone", - "location": "squid", - "service": "clam", + "project": "mussel", + "location": "winkle", + "service": "nautilus", } path = WorkflowTemplateServiceClient.service_path(**expected) @@ -8192,9 +8290,9 @@ def test_parse_service_path(): def test_workflow_template_path(): - project = "whelk" - region = "octopus" - workflow_template = "oyster" + project = "scallop" + region = "abalone" + workflow_template = "squid" expected = "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}".format( project=project, region=region, @@ -8208,9 +8306,9 @@ def test_workflow_template_path(): def test_parse_workflow_template_path(): expected = { - "project": "nudibranch", - "region": "cuttlefish", - "workflow_template": "mussel", + "project": "clam", + "region": "whelk", + "workflow_template": "octopus", } path = 
WorkflowTemplateServiceClient.workflow_template_path(**expected) @@ -8220,7 +8318,7 @@ def test_parse_workflow_template_path(): def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -8230,7 +8328,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "nudibranch", } path = WorkflowTemplateServiceClient.common_billing_account_path(**expected) @@ -8240,7 +8338,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "cuttlefish" expected = "folders/{folder}".format( folder=folder, ) @@ -8250,7 +8348,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "mussel", } path = WorkflowTemplateServiceClient.common_folder_path(**expected) @@ -8260,7 +8358,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "winkle" expected = "organizations/{organization}".format( organization=organization, ) @@ -8270,7 +8368,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "nautilus", } path = WorkflowTemplateServiceClient.common_organization_path(**expected) @@ -8280,7 +8378,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "scallop" expected = "projects/{project}".format( project=project, ) @@ -8290,7 +8388,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "abalone", } path = WorkflowTemplateServiceClient.common_project_path(**expected) @@ -8300,8 +8398,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project 
= "oyster" - location = "nudibranch" + project = "squid" + location = "clam" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -8312,8 +8410,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "whelk", + "location": "octopus", } path = WorkflowTemplateServiceClient.common_location_path(**expected) From 76267b2b8998fd2a3602ebf4d12d2aaa30a90cde Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 14 Sep 2024 00:57:09 +0000 Subject: [PATCH 15/59] feat: [google-cloud-batch] A new value `CANCELLATION_IN_PROGRESS` is added to enum `State` (#13074) - [ ] Regenerate this pull request now. feat: A new value `CANCELLED` is added to enum `State` PiperOrigin-RevId: 673051518 Source-Link: https://github.com/googleapis/googleapis/commit/2b46b7546bd801cf9bc9449843666c4b55fc574d Source-Link: https://github.com/googleapis/googleapis-gen/commit/4083dff5bd4f0c0136aca9f226ae89f58a669069 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJhdGNoLy5Pd2xCb3QueWFtbCIsImgiOiI0MDgzZGZmNWJkNGYwYzAxMzZhY2E5ZjIyNmFlODlmNThhNjY5MDY5In0= --------- Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky --- .../google/cloud/batch/gapic_version.py | 2 +- .../google/cloud/batch_v1/gapic_version.py | 2 +- .../google/cloud/batch_v1alpha/gapic_version.py | 2 +- .../google/cloud/batch_v1alpha/types/job.py | 10 ++++++++++ .../snippet_metadata_google.cloud.batch.v1.json | 2 +- .../snippet_metadata_google.cloud.batch.v1alpha.json | 2 +- 6 files changed, 15 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index 7de8a6a6838d..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ 
b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.27" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index 7de8a6a6838d..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.27" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index 7de8a6a6838d..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.27" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py index 666e309ad00b..744d6bdb9a8b 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py @@ -332,6 +332,14 @@ class State(proto.Enum): The Job will be deleted, but has not been deleted yet. Typically this is because resources used by the Job are still being cleaned up. 
+ CANCELLATION_IN_PROGRESS (7): + The Job cancellation is in progress, this is + because the resources used by the Job are still + being cleaned up. + CANCELLED (8): + The Job has been cancelled, the task + executions were stopped and the resources were + cleaned up. """ STATE_UNSPECIFIED = 0 QUEUED = 1 @@ -340,6 +348,8 @@ class State(proto.Enum): SUCCEEDED = 4 FAILED = 5 DELETION_IN_PROGRESS = 6 + CANCELLATION_IN_PROGRESS = 7 + CANCELLED = 8 class InstanceStatus(proto.Message): r"""VM instance status. diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index 1a9ad7a0b658..e2df1067e4dd 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.27" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 9c3638c4d767..7f67670b100c 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.27" + "version": "0.1.0" }, "snippets": [ { From b624f04da8a9b6461d4714f0f0bcf13f1f35fa31 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 14 Sep 2024 01:00:39 +0000 Subject: [PATCH 16/59] feat: [google-cloud-orchestration-airflow] add `satisfies_pzi` to 
`Environment` (#13077) - [ ] Regenerate this pull request now. BEGIN_COMMIT_OVERRIDE feat: [google-cloud-orchestration-airflow] add `satisfies_pzi` to `Environment` feat: [google-cloud-orchestration-airflow] A new method `CheckUpgrade` is added to service `Environments` feat: A new field `satisfies_pzi` is added to message `.google.cloud.orchestration.airflow.service.v1.Environment` feat: A new message `CheckUpgradeRequest` is added feat: A new field `airflow_metadata_retention_config` is added to message `.google.cloud.orchestration.airflow.service.v1.DataRetentionConfig` feat: A new message `AirflowMetadataRetentionPolicyConfig` is added docs: A comment for field `maintenance_window` in message `.google.cloud.orchestration.airflow.service.v1.EnvironmentConfig` is changed docs: A comment for message `WorkloadsConfig` is changed docs: A comment for field `storage_mode` in message `.google.cloud.orchestration.airflow.service.v1.TaskLogsRetentionConfig` is changed END_COMMIT_OVERRIDE PiperOrigin-RevId: 673910740 Source-Link: https://github.com/googleapis/googleapis/commit/dcc4f933bfbc0bc805187ae7f65b3b6be23fd1c3 Source-Link: https://github.com/googleapis/googleapis-gen/commit/0683c793d18547a017f446533bed4bcd09f565d9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW9yY2hlc3RyYXRpb24tYWlyZmxvdy8uT3dsQm90LnlhbWwiLCJoIjoiMDY4M2M3OTNkMTg1NDdhMDE3ZjQ0NjUzM2JlZDRiY2QwOWY1NjVkOSJ9 BEGIN_NESTED_COMMIT feat: [google-cloud-orchestration-airflow] A new method `CheckUpgrade` is added to service `Environments` feat: A new field `satisfies_pzi` is added to message `.google.cloud.orchestration.airflow.service.v1.Environment` feat: A new message `CheckUpgradeRequest` is added feat: A new field `airflow_metadata_retention_config` is added to message `.google.cloud.orchestration.airflow.service.v1.DataRetentionConfig` feat: A new message `AirflowMetadataRetentionPolicyConfig` is added docs: A comment for field `maintenance_window` in message 
`.google.cloud.orchestration.airflow.service.v1.EnvironmentConfig` is changed docs: A comment for message `WorkloadsConfig` is changed docs: A comment for field `storage_mode` in message `.google.cloud.orchestration.airflow.service.v1.TaskLogsRetentionConfig` is changed PiperOrigin-RevId: 673766368 Source-Link: https://github.com/googleapis/googleapis/commit/0f44538daf93e648e4fe5529acf8219cef3a0a39 Source-Link: https://github.com/googleapis/googleapis-gen/commit/802f7c8cdf887527e99fa9c0d774adfd33a16ffe Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW9yY2hlc3RyYXRpb24tYWlyZmxvdy8uT3dsQm90LnlhbWwiLCJoIjoiODAyZjdjOGNkZjg4NzUyN2U5OWZhOWMwZDc3NGFkZmQzM2ExNmZmZSJ9 END_NESTED_COMMIT --------- Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky --- .../orchestration/airflow/service/__init__.py | 4 + .../airflow/service_v1/__init__.py | 4 + .../airflow/service_v1/gapic_metadata.json | 15 + .../services/environments/async_client.py | 103 ++++ .../services/environments/client.py | 101 ++++ .../services/environments/transports/base.py | 14 + .../services/environments/transports/grpc.py | 29 + .../environments/transports/grpc_asyncio.py | 36 ++ .../services/environments/transports/rest.py | 133 +++++ .../airflow/service_v1/types/__init__.py | 4 + .../airflow/service_v1/types/environments.py | 121 +++- .../service_v1beta1/types/environments.py | 6 + ...erated_environments_check_upgrade_async.py | 56 ++ ...nerated_environments_check_upgrade_sync.py | 56 ++ ...loud.orchestration.airflow.service.v1.json | 153 +++++ .../scripts/fixup_service_v1_keywords.py | 1 + .../gapic/service_v1/test_environments.py | 561 +++++++++++++++++- .../service_v1beta1/test_environments.py | 9 + 18 files changed, 1400 insertions(+), 6 deletions(-) create mode 100644 packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_check_upgrade_async.py create mode 100644 
packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_check_upgrade_sync.py diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/__init__.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/__init__.py index b79f7274cddc..962fbf440f12 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/__init__.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/__init__.py @@ -31,6 +31,8 @@ ImageVersionsClient, ) from google.cloud.orchestration.airflow.service_v1.types.environments import ( + AirflowMetadataRetentionPolicyConfig, + CheckUpgradeRequest, CheckUpgradeResponse, CloudDataLineageIntegration, CreateEnvironmentRequest, @@ -104,6 +106,8 @@ "EnvironmentsAsyncClient", "ImageVersionsClient", "ImageVersionsAsyncClient", + "AirflowMetadataRetentionPolicyConfig", + "CheckUpgradeRequest", "CheckUpgradeResponse", "CloudDataLineageIntegration", "CreateEnvironmentRequest", diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/__init__.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/__init__.py index a30d9c61ae59..2cab45afb1ee 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/__init__.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/__init__.py @@ -23,6 +23,8 @@ from .services.environments import EnvironmentsAsyncClient, EnvironmentsClient from .services.image_versions import ImageVersionsAsyncClient, ImageVersionsClient from .types.environments import ( + AirflowMetadataRetentionPolicyConfig, + CheckUpgradeRequest, CheckUpgradeResponse, CloudDataLineageIntegration, CreateEnvironmentRequest, @@ -92,6 +94,8 @@ __all__ = ( "EnvironmentsAsyncClient", 
"ImageVersionsAsyncClient", + "AirflowMetadataRetentionPolicyConfig", + "CheckUpgradeRequest", "CheckUpgradeResponse", "CloudDataLineageIntegration", "CreateEnvironmentRequest", diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json index 3511b1f8d28f..aca0f03a3f15 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json @@ -10,6 +10,11 @@ "grpc": { "libraryClient": "EnvironmentsClient", "rpcs": { + "CheckUpgrade": { + "methods": [ + "check_upgrade" + ] + }, "CreateEnvironment": { "methods": [ "create_environment" @@ -130,6 +135,11 @@ "grpc-async": { "libraryClient": "EnvironmentsAsyncClient", "rpcs": { + "CheckUpgrade": { + "methods": [ + "check_upgrade" + ] + }, "CreateEnvironment": { "methods": [ "create_environment" @@ -250,6 +260,11 @@ "rest": { "libraryClient": "EnvironmentsClient", "rpcs": { + "CheckUpgrade": { + "methods": [ + "check_upgrade" + ] + }, "CreateEnvironment": { "methods": [ "create_environment" diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py index 3406e2214500..849eed1fbf2d 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py @@ -1448,6 +1448,109 @@ async def sample_list_workloads(): # Done; return the response. 
return response + async def check_upgrade( + self, + request: Optional[Union[environments.CheckUpgradeRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Check if an upgrade operation on the environment will + succeed. + In case of problems detailed info can be found in the + returned Operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + async def sample_check_upgrade(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.CheckUpgradeRequest( + environment="environment_value", + ) + + # Make the request + operation = client.check_upgrade(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1.types.CheckUpgradeRequest, dict]]): + The request object. Request to check whether image + upgrade will succeed. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.orchestration.airflow.service_v1.types.CheckUpgradeResponse` Message containing information about the result of an upgrade check + operation. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, environments.CheckUpgradeRequest): + request = environments.CheckUpgradeRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.check_upgrade + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + environments.CheckUpgradeResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. 
+ return response + async def create_user_workloads_secret( self, request: Optional[ diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/client.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/client.py index a00e4bf1e986..6a8635aa5380 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/client.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/client.py @@ -1884,6 +1884,107 @@ def sample_list_workloads(): # Done; return the response. return response + def check_upgrade( + self, + request: Optional[Union[environments.CheckUpgradeRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Check if an upgrade operation on the environment will + succeed. + In case of problems detailed info can be found in the + returned Operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + def sample_check_upgrade(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.CheckUpgradeRequest( + environment="environment_value", + ) + + # Make the request + operation = client.check_upgrade(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1.types.CheckUpgradeRequest, dict]): + The request object. Request to check whether image + upgrade will succeed. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.orchestration.airflow.service_v1.types.CheckUpgradeResponse` Message containing information about the result of an upgrade check + operation. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, environments.CheckUpgradeRequest): + request = environments.CheckUpgradeRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.check_upgrade] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + environments.CheckUpgradeResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. + return response + def create_user_workloads_secret( self, request: Optional[ diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py index 735acee260d9..05b1c3627955 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py @@ -177,6 +177,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.check_upgrade: gapic_v1.method.wrap_method( + self.check_upgrade, + default_timeout=None, + client_info=client_info, + ), self.create_user_workloads_secret: gapic_v1.method.wrap_method( self.create_user_workloads_secret, default_timeout=None, @@ -359,6 +364,15 @@ def list_workloads( ]: raise NotImplementedError() + @property + def check_upgrade( + self, + ) -> Callable[ + [environments.CheckUpgradeRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def create_user_workloads_secret( self, diff --git 
a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py index 78c4216a1d96..1d424d9d8162 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py @@ -502,6 +502,35 @@ def list_workloads( ) return self._stubs["list_workloads"] + @property + def check_upgrade( + self, + ) -> Callable[[environments.CheckUpgradeRequest], operations_pb2.Operation]: + r"""Return a callable for the check upgrade method over gRPC. + + Check if an upgrade operation on the environment will + succeed. + In case of problems detailed info can be found in the + returned Operation. + + Returns: + Callable[[~.CheckUpgradeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "check_upgrade" not in self._stubs: + self._stubs["check_upgrade"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/CheckUpgrade", + request_serializer=environments.CheckUpgradeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["check_upgrade"] + @property def create_user_workloads_secret( self, diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py index cbe9ca640a9c..0c408a50e4cf 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py @@ -518,6 +518,37 @@ def list_workloads( ) return self._stubs["list_workloads"] + @property + def check_upgrade( + self, + ) -> Callable[ + [environments.CheckUpgradeRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the check upgrade method over gRPC. + + Check if an upgrade operation on the environment will + succeed. + In case of problems detailed info can be found in the + returned Operation. + + Returns: + Callable[[~.CheckUpgradeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "check_upgrade" not in self._stubs: + self._stubs["check_upgrade"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/CheckUpgrade", + request_serializer=environments.CheckUpgradeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["check_upgrade"] + @property def create_user_workloads_secret( self, @@ -1021,6 +1052,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.check_upgrade: gapic_v1.method_async.wrap_method( + self.check_upgrade, + default_timeout=None, + client_info=client_info, + ), self.create_user_workloads_secret: gapic_v1.method_async.wrap_method( self.create_user_workloads_secret, default_timeout=None, diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/rest.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/rest.py index a735deff7f54..6a8be243f2a9 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/rest.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/rest.py @@ -72,6 +72,14 @@ class EnvironmentsRestInterceptor: .. 
code-block:: python class MyCustomEnvironmentsInterceptor(EnvironmentsRestInterceptor): + def pre_check_upgrade(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_check_upgrade(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_environment(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -254,6 +262,29 @@ def post_update_user_workloads_secret(self, response): """ + def pre_check_upgrade( + self, + request: environments.CheckUpgradeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[environments.CheckUpgradeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for check_upgrade + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. + """ + return request, metadata + + def post_check_upgrade( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for check_upgrade + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + def pre_create_environment( self, request: environments.CreateEnvironmentRequest, @@ -988,6 +1019,100 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. 
return self._operations_client + class _CheckUpgrade(EnvironmentsRestStub): + def __hash__(self): + return hash("CheckUpgrade") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: environments.CheckUpgradeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the check upgrade method over HTTP. + + Args: + request (~.environments.CheckUpgradeRequest): + The request object. Request to check whether image + upgrade will succeed. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{environment=projects/*/locations/*/environments/*}:checkUpgrade", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_check_upgrade(request, metadata) + pb_request = environments.CheckUpgradeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_check_upgrade(resp) + return resp + class _CreateEnvironment(EnvironmentsRestStub): def __hash__(self): return hash("CreateEnvironment") @@ -2958,6 +3083,14 @@ def __call__( resp = self._interceptor.post_update_user_workloads_secret(resp) return resp + @property + def check_upgrade( + self, + ) -> Callable[[environments.CheckUpgradeRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CheckUpgrade(self._session, self._host, self._interceptor) # type: ignore + @property def create_environment( self, diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/__init__.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/__init__.py index 873b24e7b536..05d6386b5e43 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/__init__.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/__init__.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from .environments import ( + AirflowMetadataRetentionPolicyConfig, + CheckUpgradeRequest, CheckUpgradeResponse, CloudDataLineageIntegration, CreateEnvironmentRequest, @@ -81,6 +83,8 @@ from .operations import OperationMetadata __all__ = ( + "AirflowMetadataRetentionPolicyConfig", + "CheckUpgradeRequest", "CheckUpgradeResponse", "CloudDataLineageIntegration", "CreateEnvironmentRequest", diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/environments.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/environments.py index 5414a93ee0fe..05f5049857da 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/environments.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/environments.py @@ -79,9 +79,11 @@ "MasterAuthorizedNetworksConfig", "CloudDataLineageIntegration", "Environment", + "CheckUpgradeRequest", "CheckUpgradeResponse", "DataRetentionConfig", "TaskLogsRetentionConfig", + "AirflowMetadataRetentionPolicyConfig", }, ) @@ -1392,8 +1394,10 @@ class EnvironmentConfig(proto.Message): hours. If this value is omitted, the default value for - maintenance window will be applied. The default - value is Saturday and Sunday 00-06 GMT. + maintenance window is applied. By default, + maintenance windows are from 00:00:00 to + 04:00:00 (GMT) on Friday, Saturday, and Sunday + every week. workloads_config (google.cloud.orchestration.airflow.service_v1.types.WorkloadsConfig): Optional. The workloads configuration settings for the GKE cluster associated with the Cloud Composer environment. The @@ -2559,6 +2563,9 @@ class TriggererResource(proto.Message): class DagProcessorResource(proto.Message): r"""Configuration for resources used by Airflow DAG processors. 
+ This field is supported for Cloud Composer environments in versions + composer-3.\ *.*-airflow-*.*.\* and newer. + Attributes: cpu (float): Optional. CPU request and limit for a single @@ -2778,6 +2785,8 @@ class Environment(proto.Message): <= 128 bytes in size. satisfies_pzs (bool): Output only. Reserved for future use. + satisfies_pzi (bool): + Output only. Reserved for future use. storage_config (google.cloud.orchestration.airflow.service_v1.types.StorageConfig): Optional. Storage configuration for this environment. @@ -2850,6 +2859,10 @@ class State(proto.Enum): proto.BOOL, number=8, ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=10, + ) storage_config: "StorageConfig" = proto.Field( proto.MESSAGE, number=9, @@ -2857,6 +2870,58 @@ class State(proto.Enum): ) +class CheckUpgradeRequest(proto.Message): + r"""Request to check whether image upgrade will succeed. + + Attributes: + environment (str): + Required. The resource name of the + environment to check upgrade for, in the form: + + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". + image_version (str): + Optional. The version of the software running in the + environment. This encapsulates both the version of Cloud + Composer functionality and the version of Apache Airflow. It + must match the regular expression + ``composer-([0-9]+(\.[0-9]+\.[0-9]+(-preview\.[0-9]+)?)?|latest)-airflow-([0-9]+(\.[0-9]+(\.[0-9]+)?)?)``. + When used as input, the server also checks if the provided + version is supported and denies the request for an + unsupported version. + + The Cloud Composer portion of the image version is a full + `semantic version `__, or an alias in + the form of major version number or ``latest``. When an + alias is provided, the server replaces it with the current + Cloud Composer version that satisfies the alias. 
+ + The Apache Airflow portion of the image version is a full + semantic version that points to one of the supported Apache + Airflow versions, or an alias in the form of only major or + major.minor versions specified. When an alias is provided, + the server replaces it with the latest Apache Airflow + version that satisfies the alias and is supported in the + given Cloud Composer version. + + In all cases, the resolved image version is stored in the + same field. + + See also `version + list `__ + and `versioning + overview `__. + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + image_version: str = proto.Field( + proto.STRING, + number=2, + ) + + class CheckUpgradeResponse(proto.Message): r"""Message containing information about the result of an upgrade check operation. @@ -2927,11 +2992,21 @@ class DataRetentionConfig(proto.Message): mechanism. Attributes: + airflow_metadata_retention_config (google.cloud.orchestration.airflow.service_v1.types.AirflowMetadataRetentionPolicyConfig): + Optional. The retention policy for airflow + metadata database. task_logs_retention_config (google.cloud.orchestration.airflow.service_v1.types.TaskLogsRetentionConfig): Optional. The configuration settings for task logs retention """ + airflow_metadata_retention_config: "AirflowMetadataRetentionPolicyConfig" = ( + proto.Field( + proto.MESSAGE, + number=1, + message="AirflowMetadataRetentionPolicyConfig", + ) + ) task_logs_retention_config: "TaskLogsRetentionConfig" = proto.Field( proto.MESSAGE, number=2, @@ -2945,8 +3020,7 @@ class TaskLogsRetentionConfig(proto.Message): Attributes: storage_mode (google.cloud.orchestration.airflow.service_v1.types.TaskLogsRetentionConfig.TaskLogsStorageMode): Optional. The mode of storage for Airflow - workers task logs. For details, see - go/composer-store-task-logs-in-cloud-logging-only-design-doc + workers task logs. 
""" class TaskLogsStorageMode(proto.Enum): @@ -2973,4 +3047,43 @@ class TaskLogsStorageMode(proto.Enum): ) +class AirflowMetadataRetentionPolicyConfig(proto.Message): + r"""The policy for airflow metadata database retention. + + Attributes: + retention_mode (google.cloud.orchestration.airflow.service_v1.types.AirflowMetadataRetentionPolicyConfig.RetentionMode): + Optional. Retention can be either enabled or + disabled. + retention_days (int): + Optional. How many days data should be + retained for. + """ + + class RetentionMode(proto.Enum): + r"""Describes retention policy. + + Values: + RETENTION_MODE_UNSPECIFIED (0): + Default mode doesn't change environment + parameters. + RETENTION_MODE_ENABLED (1): + Retention policy is enabled. + RETENTION_MODE_DISABLED (2): + Retention policy is disabled. + """ + RETENTION_MODE_UNSPECIFIED = 0 + RETENTION_MODE_ENABLED = 1 + RETENTION_MODE_DISABLED = 2 + + retention_mode: RetentionMode = proto.Field( + proto.ENUM, + number=1, + enum=RetentionMode, + ) + retention_days: int = proto.Field( + proto.INT32, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py index b83e81eb209b..a7d029a3a945 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py @@ -2957,6 +2957,8 @@ class Environment(proto.Message): <= 128 bytes in size. satisfies_pzs (bool): Output only. Reserved for future use. + satisfies_pzi (bool): + Output only. Reserved for future use. storage_config (google.cloud.orchestration.airflow.service_v1beta1.types.StorageConfig): Optional. 
Storage configuration for this environment. @@ -3029,6 +3031,10 @@ class State(proto.Enum): proto.BOOL, number=8, ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=10, + ) storage_config: "StorageConfig" = proto.Field( proto.MESSAGE, number=9, diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_check_upgrade_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_check_upgrade_async.py new file mode 100644 index 000000000000..6b0d1d7e863b --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_check_upgrade_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CheckUpgrade +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_CheckUpgrade_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +async def sample_check_upgrade(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.CheckUpgradeRequest( + environment="environment_value", + ) + + # Make the request + operation = client.check_upgrade(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_CheckUpgrade_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_check_upgrade_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_check_upgrade_sync.py new file mode 100644 index 000000000000..f8e30156ed47 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_check_upgrade_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CheckUpgrade +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_CheckUpgrade_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +def sample_check_upgrade(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.CheckUpgradeRequest( + environment="environment_value", + ) + + # Make the request + operation = client.check_upgrade(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_CheckUpgrade_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json index 3fffc7af33c9..389370672713 100644 --- a/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json @@ -11,6 +11,159 @@ "version": "0.1.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient.check_upgrade", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.CheckUpgrade", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "CheckUpgrade" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.CheckUpgradeRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "check_upgrade" + }, + "description": "Sample for CheckUpgrade", + "file": "composer_v1_generated_environments_check_upgrade_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_CheckUpgrade_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_check_upgrade_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient.check_upgrade", + "method": { + "fullName": 
"google.cloud.orchestration.airflow.service.v1.Environments.CheckUpgrade", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "CheckUpgrade" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.CheckUpgradeRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "check_upgrade" + }, + "description": "Sample for CheckUpgrade", + "file": "composer_v1_generated_environments_check_upgrade_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_CheckUpgrade_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_check_upgrade_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-orchestration-airflow/scripts/fixup_service_v1_keywords.py b/packages/google-cloud-orchestration-airflow/scripts/fixup_service_v1_keywords.py index dcf2905a8c0c..1d453d96293f 100644 --- a/packages/google-cloud-orchestration-airflow/scripts/fixup_service_v1_keywords.py +++ b/packages/google-cloud-orchestration-airflow/scripts/fixup_service_v1_keywords.py @@ -39,6 +39,7 @@ def partition( class serviceCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 
'check_upgrade': ('environment', 'image_version', ), 'create_environment': ('parent', 'environment', ), 'create_user_workloads_config_map': ('parent', 'user_workloads_config_map', ), 'create_user_workloads_secret': ('parent', 'user_workloads_secret', ), diff --git a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py index 3751f10f0bbf..ddd117361ace 100644 --- a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py +++ b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py @@ -1531,6 +1531,7 @@ def test_get_environment(request_type, transport: str = "grpc"): uuid="uuid_value", state=environments.Environment.State.CREATING, satisfies_pzs=True, + satisfies_pzi=True, ) response = client.get_environment(request) @@ -1546,6 +1547,7 @@ def test_get_environment(request_type, transport: str = "grpc"): assert response.uuid == "uuid_value" assert response.state == environments.Environment.State.CREATING assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_environment_empty_call(): @@ -1648,6 +1650,7 @@ async def test_get_environment_empty_call_async(): uuid="uuid_value", state=environments.Environment.State.CREATING, satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_environment() @@ -1720,6 +1723,7 @@ async def test_get_environment_async( uuid="uuid_value", state=environments.Environment.State.CREATING, satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_environment(request) @@ -1736,6 +1740,7 @@ async def test_get_environment_async( assert response.uuid == "uuid_value" assert response.state == environments.Environment.State.CREATING assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -4783,6 +4788,293 @@ async def 
test_list_workloads_async_pages(): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + environments.CheckUpgradeRequest, + dict, + ], +) +def test_check_upgrade(request_type, transport: str = "grpc"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_upgrade), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.check_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = environments.CheckUpgradeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_check_upgrade_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_upgrade), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.check_upgrade() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.CheckUpgradeRequest() + + +def test_check_upgrade_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = environments.CheckUpgradeRequest( + environment="environment_value", + image_version="image_version_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_upgrade), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.check_upgrade(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.CheckUpgradeRequest( + environment="environment_value", + image_version="image_version_value", + ) + + +def test_check_upgrade_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.check_upgrade in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.check_upgrade] = mock_rpc + request = {} + client.check_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.check_upgrade(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_check_upgrade_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_upgrade), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.check_upgrade() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.CheckUpgradeRequest() + + +@pytest.mark.asyncio +async def test_check_upgrade_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.check_upgrade + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.check_upgrade + ] = mock_rpc + + request = {} + await client.check_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.check_upgrade(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_check_upgrade_async( + transport: str = "grpc_asyncio", request_type=environments.CheckUpgradeRequest +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_upgrade), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.check_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = environments.CheckUpgradeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_check_upgrade_async_from_dict(): + await test_check_upgrade_async(request_type=dict) + + +def test_check_upgrade_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.CheckUpgradeRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.check_upgrade), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.check_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_check_upgrade_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.CheckUpgradeRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_upgrade), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.check_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -10400,7 +10692,11 @@ def test_create_environment_rest(request_type): }, "resilience_mode": 1, "data_retention_config": { - "task_logs_retention_config": {"storage_mode": 1} + "airflow_metadata_retention_config": { + "retention_mode": 1, + "retention_days": 1512, + }, + "task_logs_retention_config": {"storage_mode": 1}, }, }, "uuid": "uuid_value", @@ -10409,6 +10705,7 @@ def test_create_environment_rest(request_type): "update_time": {}, "labels": {}, "satisfies_pzs": True, + "satisfies_pzi": True, "storage_config": {"bucket": "bucket_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. @@ -10713,6 +11010,7 @@ def test_get_environment_rest(request_type): uuid="uuid_value", state=environments.Environment.State.CREATING, satisfies_pzs=True, + satisfies_pzi=True, ) # Wrap the value into a proper Response obj @@ -10732,6 +11030,7 @@ def test_get_environment_rest(request_type): assert response.uuid == "uuid_value" assert response.state == environments.Environment.State.CREATING assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_environment_rest_use_cached_wrapped_rpc(): @@ -11324,7 +11623,11 @@ def test_update_environment_rest(request_type): }, "resilience_mode": 1, "data_retention_config": { - "task_logs_retention_config": {"storage_mode": 1} + "airflow_metadata_retention_config": { + "retention_mode": 1, + "retention_days": 1512, + }, + "task_logs_retention_config": {"storage_mode": 1}, }, }, "uuid": "uuid_value", @@ -11333,6 +11636,7 @@ def test_update_environment_rest(request_type): "update_time": {}, "labels": {}, "satisfies_pzs": True, + "satisfies_pzi": True, "storage_config": {"bucket": "bucket_value"}, } # The version of a generated dependency at test runtime may differ from the 
version used during generation. @@ -12747,6 +13051,255 @@ def test_list_workloads_rest_pager(transport: str = "rest"): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + environments.CheckUpgradeRequest, + dict, + ], +) +def test_check_upgrade_rest(request_type): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.check_upgrade(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_check_upgrade_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.check_upgrade in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.check_upgrade] = mock_rpc + + request = {} + client.check_upgrade(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.check_upgrade(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_check_upgrade_rest_required_fields( + request_type=environments.CheckUpgradeRequest, +): + transport_class = transports.EnvironmentsRestTransport + + request_init = {} + request_init["environment"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).check_upgrade._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["environment"] = "environment_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).check_upgrade._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "environment" in jsonified_request + assert jsonified_request["environment"] == "environment_value" + + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.check_upgrade(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_check_upgrade_rest_unset_required_fields(): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.check_upgrade._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("environment",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_check_upgrade_rest_interceptors(null_interceptor): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EnvironmentsRestInterceptor(), + ) + client = EnvironmentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + 
transports.EnvironmentsRestInterceptor, "post_check_upgrade" + ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "pre_check_upgrade" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = environments.CheckUpgradeRequest.pb( + environments.CheckUpgradeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = environments.CheckUpgradeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.check_upgrade( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_check_upgrade_rest_bad_request( + transport: str = "rest", request_type=environments.CheckUpgradeRequest +): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.check_upgrade(request) + + +def test_check_upgrade_rest_error(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -17095,6 +17648,7 @@ def test_environments_base_transport(): "stop_airflow_command", "poll_airflow_command", "list_workloads", + "check_upgrade", "create_user_workloads_secret", "get_user_workloads_secret", "list_user_workloads_secrets", @@ -17416,6 +17970,9 @@ def test_environments_client_transport_session_collision(transport_name): session1 = client1.transport.list_workloads._session session2 = client2.transport.list_workloads._session assert session1 != session2 + session1 = client1.transport.check_upgrade._session + session2 = client2.transport.check_upgrade._session + assert session1 != session2 session1 = client1.transport.create_user_workloads_secret._session session2 = client2.transport.create_user_workloads_secret._session assert session1 != session2 diff --git a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py index 5cef23532261..053e5f28d823 100644 --- a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py +++ b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py @@ -1534,6 +1534,7 @@ def test_get_environment(request_type, transport: str = "grpc"): uuid="uuid_value", state=environments.Environment.State.CREATING, satisfies_pzs=True, + satisfies_pzi=True, ) response = client.get_environment(request) @@ -1549,6 
+1550,7 @@ def test_get_environment(request_type, transport: str = "grpc"): assert response.uuid == "uuid_value" assert response.state == environments.Environment.State.CREATING assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_environment_empty_call(): @@ -1651,6 +1653,7 @@ async def test_get_environment_empty_call_async(): uuid="uuid_value", state=environments.Environment.State.CREATING, satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_environment() @@ -1723,6 +1726,7 @@ async def test_get_environment_async( uuid="uuid_value", state=environments.Environment.State.CREATING, satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_environment(request) @@ -1739,6 +1743,7 @@ async def test_get_environment_async( assert response.uuid == "uuid_value" assert response.state == environments.Environment.State.CREATING assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -11008,6 +11013,7 @@ def test_create_environment_rest(request_type): "update_time": {}, "labels": {}, "satisfies_pzs": True, + "satisfies_pzi": True, "storage_config": {"bucket": "bucket_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
@@ -11312,6 +11318,7 @@ def test_get_environment_rest(request_type): uuid="uuid_value", state=environments.Environment.State.CREATING, satisfies_pzs=True, + satisfies_pzi=True, ) # Wrap the value into a proper Response obj @@ -11331,6 +11338,7 @@ def test_get_environment_rest(request_type): assert response.uuid == "uuid_value" assert response.state == environments.Environment.State.CREATING assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_environment_rest_use_cached_wrapped_rpc(): @@ -11938,6 +11946,7 @@ def test_update_environment_rest(request_type): "update_time": {}, "labels": {}, "satisfies_pzs": True, + "satisfies_pzi": True, "storage_config": {"bucket": "bucket_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. From a20b1e508068845c36b1701836ba17a699cb10ac Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 14 Sep 2024 01:03:49 +0000 Subject: [PATCH 17/59] feat: [google-apps-chat] If you're a domain administrator or a delegated administrator, you can now include the `useAdminAccess` parameter when you call the Chat API with your administrator privileges with the following methods to manage Chat spaces an... (#13081) - [ ] Regenerate this pull request now. BEGIN_COMMIT_OVERRIDE feat: If you're a domain administrator or a delegated administrator, you can now include the `useAdminAccess` parameter when you call the Chat API with your administrator privileges with the following methods to manage Chat spaces and memberships in your Workspace organization: - SearchSpaces - DeleteSpace - UpdateSpace - GetSpace - ListMemberships - DeleteMembership - CreateMembership - UpdateMembership - GetMembership Additionally, `last_active_time` and `membership_count` parameters are added to the `Space` resource. 
docs: A comment for field `filter` in message `.google.chat.v1.ListMembershipsRequest` is updated to support `!=` operator END_COMMIT_OVERRIDE d memberships in your Workspace organization: - SearchSpaces - DeleteSpace - UpdateSpace - GetSpace - ListMemberships - DeleteMembership - CreateMembership - UpdateMembership - GetMembership Additionally, `last_active_time` and `membership_count` parameters are added to the `Space` resource. docs: A comment for field `filter` in message `.google.chat.v1.ListMembershipsRequest` is updated to support `!=` operator PiperOrigin-RevId: 673895888 Source-Link: https://github.com/googleapis/googleapis/commit/c5bc296a6d5e0b9344e2a2aef90cefb017ae29ad Source-Link: https://github.com/googleapis/googleapis-gen/commit/1ed1ebf58a6d432da9a23de6d9d9d058c21d9a44 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFwcHMtY2hhdC8uT3dsQm90LnlhbWwiLCJoIjoiMWVkMWViZjU4YTZkNDMyZGE5YTIzZGU2ZDlkOWQwNThjMjFkOWE0NCJ9 --------- Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky --- .../google/apps/chat/__init__.py | 4 + .../google/apps/chat/gapic_version.py | 2 +- .../google/apps/chat_v1/__init__.py | 4 + .../google/apps/chat_v1/gapic_metadata.json | 15 + .../google/apps/chat_v1/gapic_version.py | 2 +- .../services/chat_service/async_client.py | 97 ++ .../chat_v1/services/chat_service/client.py | 95 ++ .../chat_v1/services/chat_service/pagers.py | 152 +++ .../services/chat_service/transports/base.py | 23 + .../services/chat_service/transports/grpc.py | 30 + .../chat_service/transports/grpc_asyncio.py | 44 + .../services/chat_service/transports/rest.py | 128 +++ .../google/apps/chat_v1/types/__init__.py | 4 + .../google/apps/chat_v1/types/membership.py | 89 +- .../google/apps/chat_v1/types/space.py | 287 ++++++ ...erated_chat_service_search_spaces_async.py | 53 + ...nerated_chat_service_search_spaces_sync.py | 53 + .../snippet_metadata_google.chat.v1.json | 155 ++- .../scripts/fixup_chat_v1_keywords.py | 17 +- .../unit/gapic/chat_v1/test_chat_service.py | 924 
++++++++++++++++-- 20 files changed, 2108 insertions(+), 70 deletions(-) create mode 100644 packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_async.py create mode 100644 packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_sync.py diff --git a/packages/google-apps-chat/google/apps/chat/__init__.py b/packages/google-apps-chat/google/apps/chat/__init__.py index ad2d607382db..17679ffaa12a 100644 --- a/packages/google-apps-chat/google/apps/chat/__init__.py +++ b/packages/google-apps-chat/google/apps/chat/__init__.py @@ -111,6 +111,8 @@ GetSpaceRequest, ListSpacesRequest, ListSpacesResponse, + SearchSpacesRequest, + SearchSpacesResponse, Space, UpdateSpaceRequest, ) @@ -212,6 +214,8 @@ "GetSpaceRequest", "ListSpacesRequest", "ListSpacesResponse", + "SearchSpacesRequest", + "SearchSpacesResponse", "Space", "UpdateSpaceRequest", "GetSpaceEventRequest", diff --git a/packages/google-apps-chat/google/apps/chat/gapic_version.py b/packages/google-apps-chat/google/apps/chat/gapic_version.py index 9413c3341313..558c8aab67c5 100644 --- a/packages/google-apps-chat/google/apps/chat/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/__init__.py b/packages/google-apps-chat/google/apps/chat_v1/__init__.py index 8fe816081153..d770a6fbb7ff 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/__init__.py +++ b/packages/google-apps-chat/google/apps/chat_v1/__init__.py @@ -108,6 +108,8 @@ GetSpaceRequest, ListSpacesRequest, ListSpacesResponse, + SearchSpacesRequest, + SearchSpacesResponse, Space, UpdateSpaceRequest, ) @@ -201,6 +203,8 @@ "ReactionCreatedEventData", "ReactionDeletedEventData", "RichLinkMetadata", + "SearchSpacesRequest", + "SearchSpacesResponse", "SetUpSpaceRequest", "SlashCommand", "SlashCommandMetadata", diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json b/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json index 3cdf90b4e07e..325f347acf03 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json +++ b/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json @@ -120,6 +120,11 @@ "list_spaces" ] }, + "SearchSpaces": { + "methods": [ + "search_spaces" + ] + }, "SetUpSpace": { "methods": [ "set_up_space" @@ -265,6 +270,11 @@ "list_spaces" ] }, + "SearchSpaces": { + "methods": [ + "search_spaces" + ] + }, "SetUpSpace": { "methods": [ "set_up_space" @@ -410,6 +420,11 @@ "list_spaces" ] }, + "SearchSpaces": { + "methods": [ + "search_spaces" + ] + }, "SetUpSpace": { "methods": [ "set_up_space" diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py index 9413c3341313..558c8aab67c5 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py index 945434859bde..82774eb03431 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py @@ -1535,6 +1535,103 @@ async def sample_list_spaces(): # Done; return the response. return response + async def search_spaces( + self, + request: Optional[Union[space.SearchSpacesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchSpacesAsyncPager: + r"""Returns a list of spaces in a Google Workspace organization + based on an administrator's search. Requires `user + authentication with administrator + privileges `__. + In the request, set ``use_admin_access`` to ``true``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + async def sample_search_spaces(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + request = chat_v1.SearchSpacesRequest( + query="query_value", + ) + + # Make the request + page_result = client.search_spaces(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.apps.chat_v1.types.SearchSpacesRequest, dict]]): + The request object. Request to search for a list of + spaces based on a query. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.chat_v1.services.chat_service.pagers.SearchSpacesAsyncPager: + Response with a list of spaces + corresponding to the search spaces + request. Iterating over this object + will yield results and resolve + additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, space.SearchSpacesRequest): + request = space.SearchSpacesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.search_spaces + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.SearchSpacesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def get_space( self, request: Optional[Union[space.GetSpaceRequest, dict]] = None, diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py index 268ff696d3a6..0d542091414a 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py @@ -2071,6 +2071,101 @@ def sample_list_spaces(): # Done; return the response. return response + def search_spaces( + self, + request: Optional[Union[space.SearchSpacesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchSpacesPager: + r"""Returns a list of spaces in a Google Workspace organization + based on an administrator's search. Requires `user + authentication with administrator + privileges `__. + In the request, set ``use_admin_access`` to ``true``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + def sample_search_spaces(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.SearchSpacesRequest( + query="query_value", + ) + + # Make the request + page_result = client.search_spaces(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.apps.chat_v1.types.SearchSpacesRequest, dict]): + The request object. Request to search for a list of + spaces based on a query. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.chat_v1.services.chat_service.pagers.SearchSpacesPager: + Response with a list of spaces + corresponding to the search spaces + request. Iterating over this object + will yield results and resolve + additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, space.SearchSpacesRequest): + request = space.SearchSpacesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_spaces] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.SearchSpacesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def get_space( self, request: Optional[Union[space.GetSpaceRequest, dict]] = None, diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py index d7565e952ff8..94763fc39240 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py @@ -497,6 +497,158 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class SearchSpacesPager: + """A pager for iterating through ``search_spaces`` requests. + + This class thinly wraps an initial + :class:`google.apps.chat_v1.types.SearchSpacesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``spaces`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``SearchSpaces`` requests and continue to iterate + through the ``spaces`` field on the + corresponding responses. + + All the usual :class:`google.apps.chat_v1.types.SearchSpacesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., space.SearchSpacesResponse], + request: space.SearchSpacesRequest, + response: space.SearchSpacesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.apps.chat_v1.types.SearchSpacesRequest): + The initial request object. + response (google.apps.chat_v1.types.SearchSpacesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = space.SearchSpacesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[space.SearchSpacesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[space.Space]: + for page in self.pages: + yield from page.spaces + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchSpacesAsyncPager: + """A pager for iterating through ``search_spaces`` requests. + + This class thinly wraps an initial + :class:`google.apps.chat_v1.types.SearchSpacesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``spaces`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``SearchSpaces`` requests and continue to iterate + through the ``spaces`` field on the + corresponding responses. + + All the usual :class:`google.apps.chat_v1.types.SearchSpacesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[space.SearchSpacesResponse]], + request: space.SearchSpacesRequest, + response: space.SearchSpacesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.chat_v1.types.SearchSpacesRequest): + The initial request object. + response (google.apps.chat_v1.types.SearchSpacesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = space.SearchSpacesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[space.SearchSpacesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[space.Space]: + async def async_generator(): + async for page in self.pages: + for response in page.spaces: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListReactionsPager: """A pager for iterating through ``list_reactions`` requests. 
diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py index e5b32aaf272b..5c83cc5f462e 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py @@ -304,6 +304,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=30.0, client_info=client_info, ), + self.search_spaces: gapic_v1.method.wrap_method( + self.search_spaces, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), self.get_space: gapic_v1.method.wrap_method( self.get_space, default_retry=retries.Retry( @@ -662,6 +676,15 @@ def list_spaces( ]: raise NotImplementedError() + @property + def search_spaces( + self, + ) -> Callable[ + [space.SearchSpacesRequest], + Union[space.SearchSpacesResponse, Awaitable[space.SearchSpacesResponse]], + ]: + raise NotImplementedError() + @property def get_space( self, diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py index 2ef3b8c317bb..bfb0492b21e8 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py @@ -619,6 +619,36 @@ def list_spaces( ) return self._stubs["list_spaces"] + @property + def search_spaces( + self, + ) -> Callable[[space.SearchSpacesRequest], space.SearchSpacesResponse]: + r"""Return a callable for the search spaces method over gRPC. 
+ + Returns a list of spaces in a Google Workspace organization + based on an administrator's search. Requires `user + authentication with administrator + privileges `__. + In the request, set ``use_admin_access`` to ``true``. + + Returns: + Callable[[~.SearchSpacesRequest], + ~.SearchSpacesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_spaces" not in self._stubs: + self._stubs["search_spaces"] = self.grpc_channel.unary_unary( + "/google.chat.v1.ChatService/SearchSpaces", + request_serializer=space.SearchSpacesRequest.serialize, + response_deserializer=space.SearchSpacesResponse.deserialize, + ) + return self._stubs["search_spaces"] + @property def get_space(self) -> Callable[[space.GetSpaceRequest], space.Space]: r"""Return a callable for the get space method over gRPC. diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py index e36b81f08a6c..a404fca34305 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py @@ -629,6 +629,36 @@ def list_spaces( ) return self._stubs["list_spaces"] + @property + def search_spaces( + self, + ) -> Callable[[space.SearchSpacesRequest], Awaitable[space.SearchSpacesResponse]]: + r"""Return a callable for the search spaces method over gRPC. + + Returns a list of spaces in a Google Workspace organization + based on an administrator's search. Requires `user + authentication with administrator + privileges `__. + In the request, set ``use_admin_access`` to ``true``. 
+ + Returns: + Callable[[~.SearchSpacesRequest], + Awaitable[~.SearchSpacesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_spaces" not in self._stubs: + self._stubs["search_spaces"] = self.grpc_channel.unary_unary( + "/google.chat.v1.ChatService/SearchSpaces", + request_serializer=space.SearchSpacesRequest.serialize, + response_deserializer=space.SearchSpacesResponse.deserialize, + ) + return self._stubs["search_spaces"] + @property def get_space(self) -> Callable[[space.GetSpaceRequest], Awaitable[space.Space]]: r"""Return a callable for the get space method over gRPC. @@ -1485,6 +1515,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=30.0, client_info=client_info, ), + self.search_spaces: gapic_v1.method_async.wrap_method( + self.search_spaces, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), self.get_space: gapic_v1.method_async.wrap_method( self.get_space, default_retry=retries.AsyncRetry( diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py index 87947ff116c8..f9c4a5cd53a3 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py @@ -241,6 +241,14 @@ def post_list_spaces(self, response): logging.log(f"Received response: {response}") return response + def pre_search_spaces(self, request, metadata): + logging.log(f"Received request: 
{request}") + return request, metadata + + def post_search_spaces(self, response): + logging.log(f"Received response: {response}") + return response + def pre_set_up_space(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -744,6 +752,27 @@ def post_list_spaces( """ return response + def pre_search_spaces( + self, request: space.SearchSpacesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[space.SearchSpacesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for search_spaces + + Override in a subclass to manipulate the request or metadata + before they are sent to the ChatService server. + """ + return request, metadata + + def post_search_spaces( + self, response: space.SearchSpacesResponse + ) -> space.SearchSpacesResponse: + """Post-rpc interceptor for search_spaces + + Override in a subclass to manipulate the response + after it is returned by the ChatService server but before + it is returned to user code. + """ + return response + def pre_set_up_space( self, request: space_setup.SetUpSpaceRequest, @@ -2908,6 +2937,97 @@ def __call__( resp = self._interceptor.post_list_spaces(resp) return resp + class _SearchSpaces(ChatServiceRestStub): + def __hash__(self): + return hash("SearchSpaces") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "query": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: space.SearchSpacesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> space.SearchSpacesResponse: + r"""Call the search spaces method over HTTP. + + Args: + request (~.space.SearchSpacesRequest): + The request object. Request to search for a list of + spaces based on a query. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.space.SearchSpacesResponse: + Response with a list of spaces + corresponding to the search spaces + request. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/spaces:search", + }, + ] + request, metadata = self._interceptor.pre_search_spaces(request, metadata) + pb_request = space.SearchSpacesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = space.SearchSpacesResponse() + pb_resp = space.SearchSpacesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search_spaces(resp) + return resp + class _SetUpSpace(ChatServiceRestStub): def __hash__(self): return hash("SetUpSpace") @@ -3671,6 +3791,14 @@ def list_spaces( # In C++ this would require a dynamic_cast return self._ListSpaces(self._session, self._host, self._interceptor) # type: ignore + @property + def search_spaces( + self, + ) -> Callable[[space.SearchSpacesRequest], space.SearchSpacesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SearchSpaces(self._session, self._host, self._interceptor) # type: ignore + @property def set_up_space(self) -> Callable[[space_setup.SetUpSpaceRequest], space.Space]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py b/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py index 3e4671f0637d..d510a888bd89 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py @@ -102,6 +102,8 @@ GetSpaceRequest, ListSpacesRequest, ListSpacesResponse, + SearchSpacesRequest, + SearchSpacesResponse, Space, UpdateSpaceRequest, ) @@ -198,6 +200,8 @@ "GetSpaceRequest", "ListSpacesRequest", "ListSpacesResponse", + "SearchSpacesRequest", + "SearchSpacesResponse", "Space", "UpdateSpaceRequest", "GetSpaceEventRequest", diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/membership.py b/packages/google-apps-chat/google/apps/chat_v1/types/membership.py index 925e1d7b28ed..f0e00c470930 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/membership.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/membership.py @@ -208,6 +208,20 @@ class CreateMembershipRequest(proto.Message): relation for itself, it must use the ``chat.memberships.app`` scope, set ``user.type`` to ``BOT``, and set ``user.name`` to ``users/app``. + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.memberships`` `OAuth 2.0 + scope `__. + + Creating app memberships or creating memberships for users + outside the administrator's Google Workspace organization + isn't supported using admin access. 
""" parent: str = proto.Field( @@ -219,6 +233,10 @@ class CreateMembershipRequest(proto.Message): number=2, message="Membership", ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=5, + ) class UpdateMembershipRequest(proto.Message): @@ -235,6 +253,16 @@ class UpdateMembershipRequest(proto.Message): Currently supported field paths: - ``role`` + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.memberships`` `OAuth 2.0 + scope `__. """ membership: "Membership" = proto.Field( @@ -247,6 +275,10 @@ class UpdateMembershipRequest(proto.Message): number=2, message=field_mask_pb2.FieldMask, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=3, + ) class ListMembershipsRequest(proto.Message): @@ -289,8 +321,8 @@ class ListMembershipsRequest(proto.Message): ``ROLE_MANAGER``. To filter by type, set ``member.type`` to ``HUMAN`` or - ``BOT``. Developer Preview: You can also filter for - ``member.type`` using the ``!=`` operator. + ``BOT``. You can also filter for ``member.type`` using the + ``!=`` operator. To filter by both role and type, use the ``AND`` operator. To filter by either role or type, use the ``OR`` operator. @@ -338,6 +370,20 @@ class ListMembershipsRequest(proto.Message): Currently requires `user authentication `__. + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires either the ``chat.admin.memberships.readonly`` or + ``chat.admin.memberships`` `OAuth 2.0 + scope `__. + + Listing app memberships in a space isn't supported when + using admin access. 
""" parent: str = proto.Field( @@ -364,6 +410,10 @@ class ListMembershipsRequest(proto.Message): proto.BOOL, number=7, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=8, + ) class ListMembershipsResponse(proto.Message): @@ -414,12 +464,30 @@ class GetMembershipRequest(proto.Message): For example, ``spaces/{space}/members/example@gmail.com`` where ``example@gmail.com`` is the email of the Google Chat user. + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.memberships`` or + ``chat.admin.memberships.readonly`` `OAuth 2.0 + scopes `__. + + Getting app memberships in a space isn't supported when + using admin access. """ name: str = proto.Field( proto.STRING, number=1, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=3, + ) class DeleteMembershipRequest(proto.Message): @@ -444,12 +512,29 @@ class DeleteMembershipRequest(proto.Message): Format: ``spaces/{space}/members/{member}`` or ``spaces/{space}/members/app``. + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.memberships`` `OAuth 2.0 + scope `__. + + Deleting app memberships in a space isn't supported using + admin access. 
""" name: str = proto.Field( proto.STRING, number=1, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=2, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/space.py b/packages/google-apps-chat/google/apps/chat_v1/types/space.py index 46f46068321a..75456c5e5e13 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/space.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/space.py @@ -33,6 +33,8 @@ "GetSpaceRequest", "FindDirectMessageRequest", "UpdateSpaceRequest", + "SearchSpacesRequest", + "SearchSpacesResponse", "DeleteSpaceRequest", "CompleteImportSpaceRequest", "CompleteImportSpaceResponse", @@ -119,6 +121,9 @@ class Space(proto.Message): Only populated in the output when ``spaceType`` is ``GROUP_CHAT`` or ``SPACE``. + last_active_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp of the last message in + the space. admin_installed (bool): Output only. For direct message (DM) spaces with a Chat app, whether the space was created @@ -129,6 +134,10 @@ class Space(proto.Message): To support admin install, your Chat app must feature direct messaging. + membership_count (google.apps.chat_v1.types.Space.MembershipCount): + Output only. The count of joined memberships grouped by + member type. Populated when the ``space_type`` is ``SPACE``, + ``DIRECT_MESSAGE`` or ``GROUP_CHAT``. access_settings (google.apps.chat_v1.types.Space.AccessSettings): Optional. Specifies the `access setting `__ @@ -228,6 +237,29 @@ class SpaceDetails(proto.Message): number=2, ) + class MembershipCount(proto.Message): + r"""Represents the count of memberships of a space, grouped into + categories. + + Attributes: + joined_direct_human_user_count (int): + Count of human users that have directly + joined the space, not counting users joined by + having membership in a joined group. + joined_group_count (int): + Count of all groups that have directly joined + the space. 
+ """ + + joined_direct_human_user_count: int = proto.Field( + proto.INT32, + number=4, + ) + joined_group_count: int = proto.Field( + proto.INT32, + number=5, + ) + class AccessSettings(proto.Message): r"""Represents the `access setting `__ of the @@ -334,10 +366,20 @@ class AccessState(proto.Enum): number=17, message=timestamp_pb2.Timestamp, ) + last_active_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=18, + message=timestamp_pb2.Timestamp, + ) admin_installed: bool = proto.Field( proto.BOOL, number=19, ) + membership_count: MembershipCount = proto.Field( + proto.MESSAGE, + number=20, + message=MembershipCount, + ) access_settings: AccessSettings = proto.Field( proto.MESSAGE, number=23, @@ -480,12 +522,27 @@ class GetSpaceRequest(proto.Message): ``spaces/{space}``. Format: ``spaces/{space}`` + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.spaces`` or + ``chat.admin.spaces.readonly`` `OAuth 2.0 + scopes `__. """ name: str = proto.Field( proto.STRING, number=1, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=2, + ) class FindDirectMessageRequest(proto.Message): @@ -591,6 +648,19 @@ class UpdateSpaceRequest(proto.Message): exclusive with all other non-permission settings field paths). ``permission_settings`` is not supported with admin access. + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.spaces`` `OAuth 2.0 + scope `__. + + Some ``FieldMask`` values are not supported using admin + access. For details, see the description of ``update_mask``. 
""" space: "Space" = proto.Field( @@ -603,6 +673,209 @@ class UpdateSpaceRequest(proto.Message): number=2, message=field_mask_pb2.FieldMask, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class SearchSpacesRequest(proto.Message): + r"""Request to search for a list of spaces based on a query. + + Attributes: + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires either the ``chat.admin.spaces.readonly`` or + ``chat.admin.spaces`` `OAuth 2.0 + scope `__. + + This method currently only supports admin access, thus only + ``true`` is accepted for this field. + page_size (int): + The maximum number of spaces to return. The + service may return fewer than this value. + + If unspecified, at most 100 spaces are returned. + + The maximum value is 1000. If you use a value + more than 1000, it's automatically changed to + 1000. + page_token (str): + A token, received from the previous search + spaces call. Provide this parameter to retrieve + the subsequent page. + + When paginating, all other parameters provided + should match the call that provided the page + token. Passing different values to the other + parameters might lead to unexpected results. + query (str): + Required. A search query. + + You can search by using the following parameters: + + - ``create_time`` + - ``customer`` + - ``display_name`` + - ``external_user_allowed`` + - ``last_active_time`` + - ``space_history_state`` + - ``space_type`` + + ``create_time`` and ``last_active_time`` accept a timestamp + in `RFC-3339 `__ + format and the supported comparison operators are: ``=``, + ``<``, ``>``, ``<=``, ``>=``. + + ``customer`` is required and is used to indicate which + customer to fetch spaces from. ``customers/my_customer`` is + the only supported value. 
+ + ``display_name`` only accepts the ``HAS`` (``:``) operator. + The text to match is first tokenized into tokens and each + token is prefix-matched case-insensitively and independently + as a substring anywhere in the space's ``display_name``. For + example, ``Fun Eve`` matches ``Fun event`` or + ``The evening was fun``, but not ``notFun event`` or + ``even``. + + ``external_user_allowed`` accepts either ``true`` or + ``false``. + + ``space_history_state`` only accepts values from the + [``historyState``] + (https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces#Space.HistoryState) + field of a ``space`` resource. + + ``space_type`` is required and the only valid value is + ``SPACE``. + + Across different fields, only ``AND`` operators are + supported. A valid example is + ``space_type = "SPACE" AND display_name:"Hello"`` and an + invalid example is + ``space_type = "SPACE" OR display_name:"Hello"``. + + Among the same field, ``space_type`` doesn't support ``AND`` + or ``OR`` operators. ``display_name``, + 'space_history_state', and 'external_user_allowed' only + support ``OR`` operators. ``last_active_time`` and + ``create_time`` support both ``AND`` and ``OR`` operators. + ``AND`` can only be used to represent an interval, such as + ``last_active_time < "2022-01-01T00:00:00+00:00" AND last_active_time > "2023-01-01T00:00:00+00:00"``. 
+ + The following example queries are valid: + + :: + + customer = "customers/my_customer" AND space_type = "SPACE" + + customer = "customers/my_customer" AND space_type = "SPACE" AND + display_name:"Hello World" + + customer = "customers/my_customer" AND space_type = "SPACE" AND + (last_active_time < "2020-01-01T00:00:00+00:00" OR last_active_time > + "2022-01-01T00:00:00+00:00") + + customer = "customers/my_customer" AND space_type = "SPACE" AND + (display_name:"Hello World" OR display_name:"Fun event") AND + (last_active_time > "2020-01-01T00:00:00+00:00" AND last_active_time < + "2022-01-01T00:00:00+00:00") + + customer = "customers/my_customer" AND space_type = "SPACE" AND + (create_time > "2019-01-01T00:00:00+00:00" AND create_time < + "2020-01-01T00:00:00+00:00") AND (external_user_allowed = "true") AND + (space_history_state = "HISTORY_ON" OR space_history_state = "HISTORY_OFF") + order_by (str): + Optional. How the list of spaces is ordered. + + Supported attributes to order by are: + + - ``membership_count.joined_direct_human_user_count`` — + Denotes the count of human users that have directly + joined a space. + - ``last_active_time`` — Denotes the time when last + eligible item is added to any topic of this space. + - ``create_time`` — Denotes the time of the space creation. + + Valid ordering operation values are: + + - ``ASC`` for ascending. Default value. + + - ``DESC`` for descending. 
+ + The supported syntax are: + + - ``membership_count.joined_direct_human_user_count DESC`` + - ``membership_count.joined_direct_human_user_count ASC`` + - ``last_active_time DESC`` + - ``last_active_time ASC`` + - ``create_time DESC`` + - ``create_time ASC`` + """ + + use_admin_access: bool = proto.Field( + proto.BOOL, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + query: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class SearchSpacesResponse(proto.Message): + r"""Response with a list of spaces corresponding to the search + spaces request. + + Attributes: + spaces (MutableSequence[google.apps.chat_v1.types.Space]): + A page of the requested spaces. + next_page_token (str): + A token that can be used to retrieve the next + page. If this field is empty, there are no + subsequent pages. + total_size (int): + The total number of spaces that match the + query, across all pages. If the result is over + 10,000 spaces, this value is an estimate. + """ + + @property + def raw_page(self): + return self + + spaces: MutableSequence["Space"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Space", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + total_size: int = proto.Field( + proto.INT32, + number=3, + ) class DeleteSpaceRequest(proto.Message): @@ -613,12 +886,26 @@ class DeleteSpaceRequest(proto.Message): Required. Resource name of the space to delete. Format: ``spaces/{space}`` + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.delete`` `OAuth 2.0 + scope `__. 
""" name: str = proto.Field( proto.STRING, number=1, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=2, + ) class CompleteImportSpaceRequest(proto.Message): diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_async.py new file mode 100644 index 000000000000..3d25def75a1b --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchSpaces +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_SearchSpaces_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +async def sample_search_spaces(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + request = chat_v1.SearchSpacesRequest( + query="query_value", + ) + + # Make the request + page_result = client.search_spaces(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END chat_v1_generated_ChatService_SearchSpaces_async] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_sync.py new file mode 100644 index 000000000000..52e32c45ead5 --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchSpaces +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_SearchSpaces_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +def sample_search_spaces(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.SearchSpacesRequest( + query="query_value", + ) + + # Make the request + page_result = client.search_spaces(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END chat_v1_generated_ChatService_SearchSpaces_sync] diff --git a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json index 4442b6c5505a..ae65b2bfaefe 100644 --- a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json +++ b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-chat", - "version": "0.1.10" + "version": "0.1.0" }, "snippets": [ { @@ -3551,6 +3551,159 @@ ], "title": "chat_v1_generated_chat_service_list_spaces_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient", + "shortName": "ChatServiceAsyncClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.search_spaces", + "method": { + "fullName": "google.chat.v1.ChatService.SearchSpaces", + 
"service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "SearchSpaces" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.SearchSpacesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.chat_v1.services.chat_service.pagers.SearchSpacesAsyncPager", + "shortName": "search_spaces" + }, + "description": "Sample for SearchSpaces", + "file": "chat_v1_generated_chat_service_search_spaces_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_SearchSpaces_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_search_spaces_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.chat_v1.ChatServiceClient", + "shortName": "ChatServiceClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceClient.search_spaces", + "method": { + "fullName": "google.chat.v1.ChatService.SearchSpaces", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "SearchSpaces" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.SearchSpacesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.apps.chat_v1.services.chat_service.pagers.SearchSpacesPager", + "shortName": "search_spaces" + }, + "description": "Sample for SearchSpaces", + "file": "chat_v1_generated_chat_service_search_spaces_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_SearchSpaces_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_search_spaces_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-apps-chat/scripts/fixup_chat_v1_keywords.py b/packages/google-apps-chat/scripts/fixup_chat_v1_keywords.py index 6dfa6d4d93f8..18fea55c4198 100644 --- a/packages/google-apps-chat/scripts/fixup_chat_v1_keywords.py +++ b/packages/google-apps-chat/scripts/fixup_chat_v1_keywords.py @@ -40,31 +40,32 @@ class chatCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'complete_import_space': ('name', ), - 'create_membership': ('parent', 'membership', ), + 'create_membership': ('parent', 'membership', 'use_admin_access', ), 'create_message': ('parent', 'message', 'thread_key', 'request_id', 'message_reply_option', 'message_id', ), 'create_reaction': ('parent', 'reaction', ), 'create_space': ('space', 'request_id', ), - 'delete_membership': ('name', ), + 'delete_membership': ('name', 'use_admin_access', ), 'delete_message': ('name', 'force', ), 'delete_reaction': ('name', ), - 'delete_space': ('name', ), + 'delete_space': ('name', 'use_admin_access', ), 'find_direct_message': ('name', ), 'get_attachment': ('name', ), - 
'get_membership': ('name', ), + 'get_membership': ('name', 'use_admin_access', ), 'get_message': ('name', ), - 'get_space': ('name', ), + 'get_space': ('name', 'use_admin_access', ), 'get_space_event': ('name', ), 'get_space_read_state': ('name', ), 'get_thread_read_state': ('name', ), - 'list_memberships': ('parent', 'page_size', 'page_token', 'filter', 'show_groups', 'show_invited', ), + 'list_memberships': ('parent', 'page_size', 'page_token', 'filter', 'show_groups', 'show_invited', 'use_admin_access', ), 'list_messages': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'show_deleted', ), 'list_reactions': ('parent', 'page_size', 'page_token', 'filter', ), 'list_space_events': ('parent', 'filter', 'page_size', 'page_token', ), 'list_spaces': ('page_size', 'page_token', 'filter', ), + 'search_spaces': ('query', 'use_admin_access', 'page_size', 'page_token', 'order_by', ), 'set_up_space': ('space', 'request_id', 'memberships', ), - 'update_membership': ('membership', 'update_mask', ), + 'update_membership': ('membership', 'update_mask', 'use_admin_access', ), 'update_message': ('message', 'update_mask', 'allow_missing', ), - 'update_space': ('space', 'update_mask', ), + 'update_space': ('space', 'update_mask', 'use_admin_access', ), 'update_space_read_state': ('space_read_state', 'update_mask', ), 'upload_attachment': ('parent', 'filename', ), } diff --git a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py index 2d5d1309f21f..70b8272ff798 100644 --- a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py +++ b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py @@ -5289,6 +5289,428 @@ async def test_list_spaces_async_pages(): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + space.SearchSpacesRequest, + dict, + ], +) +def test_search_spaces(request_type, transport: str = 
"grpc"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = space.SearchSpacesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + response = client.search_spaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = space.SearchSpacesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchSpacesPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_search_spaces_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.search_spaces() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == space.SearchSpacesRequest() + + +def test_search_spaces_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = space.SearchSpacesRequest( + page_token="page_token_value", + query="query_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.search_spaces(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == space.SearchSpacesRequest( + page_token="page_token_value", + query="query_value", + order_by="order_by_value", + ) + + +def test_search_spaces_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.search_spaces in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.search_spaces] = mock_rpc + request = {} + client.search_spaces(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.search_spaces(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_search_spaces_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space.SearchSpacesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + response = await client.search_spaces() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == space.SearchSpacesRequest() + + +@pytest.mark.asyncio +async def test_search_spaces_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.search_spaces + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.search_spaces + ] = mock_rpc + + request = {} + await 
client.search_spaces(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.search_spaces(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_search_spaces_async( + transport: str = "grpc_asyncio", request_type=space.SearchSpacesRequest +): + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space.SearchSpacesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + response = await client.search_spaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = space.SearchSpacesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchSpacesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +@pytest.mark.asyncio +async def test_search_spaces_async_from_dict(): + await test_search_spaces_async(request_type=dict) + + +def test_search_spaces_pager(transport_name: str = "grpc"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.SearchSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.search_spaces(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, space.Space) for i in results) + + +def test_search_spaces_pages(transport_name: str = "grpc"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.SearchSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + pages = list(client.search_spaces(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_search_spaces_async_pager(): + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_spaces), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.SearchSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + async_pager = await client.search_spaces( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, space.Space) for i in responses) + + +@pytest.mark.asyncio +async def test_search_spaces_async_pages(): + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.search_spaces), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.SearchSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.search_spaces(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + @pytest.mark.parametrize( "request_type", [ @@ -12682,7 +13104,12 @@ def test_create_message_rest(request_type): "space_history_state": 1, "import_mode": True, "create_time": {}, + "last_active_time": {}, "admin_installed": True, + "membership_count": { + "joined_direct_human_user_count": 3185, + "joined_group_count": 1933, + }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", }, @@ -13612,6 +14039,7 @@ def test_list_memberships_rest_required_fields( "page_token", "show_groups", "show_invited", + "use_admin_access", ) ) jsonified_request.update(unset_fields) @@ -13675,6 +14103,7 @@ def test_list_memberships_rest_unset_required_fields(): "pageToken", "showGroups", "showInvited", + "useAdminAccess", ) ) & set(("parent",)) @@ -13987,6 +14416,8 @@ def test_get_membership_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() 
).get_membership._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -14040,7 +14471,7 @@ def test_get_membership_rest_unset_required_fields(): ) unset_fields = transport.get_membership._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -14820,7 +15251,12 @@ def test_update_message_rest(request_type): "space_history_state": 1, "import_mode": True, "create_time": {}, + "last_active_time": {}, "admin_installed": True, + "membership_count": { + "joined_direct_human_user_count": 3185, + "joined_group_count": 1933, + }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", }, @@ -16013,7 +16449,170 @@ def test_upload_attachment_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_upload_attachment_rest_interceptors(null_interceptor): +def test_upload_attachment_rest_interceptors(null_interceptor): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ChatServiceRestInterceptor(), + ) + client = ChatServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_upload_attachment" + ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "pre_upload_attachment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = attachment.UploadAttachmentRequest.pb( 
+ attachment.UploadAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = attachment.UploadAttachmentResponse.to_json( + attachment.UploadAttachmentResponse() + ) + + request = attachment.UploadAttachmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = attachment.UploadAttachmentResponse() + + client.upload_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_upload_attachment_rest_bad_request( + transport: str = "rest", request_type=attachment.UploadAttachmentRequest +): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "spaces/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.upload_attachment(request) + + +def test_upload_attachment_rest_error(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + space.ListSpacesRequest, + dict, + ], +) +def test_list_spaces_rest(request_type): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = space.ListSpacesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = space.ListSpacesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_spaces(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSpacesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_spaces_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_spaces in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_spaces] = mock_rpc + + request = {} + client.list_spaces(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_spaces(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_spaces_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16026,15 +16625,13 @@ def test_upload_attachment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_upload_attachment" + transports.ChatServiceRestInterceptor, "post_list_spaces" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_upload_attachment" + transports.ChatServiceRestInterceptor, "pre_list_spaces" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = attachment.UploadAttachmentRequest.pb( - attachment.UploadAttachmentRequest() - ) + pb_message = space.ListSpacesRequest.pb(space.ListSpacesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16045,19 +16642,19 @@ def test_upload_attachment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = attachment.UploadAttachmentResponse.to_json( - attachment.UploadAttachmentResponse() + req.return_value._content = space.ListSpacesResponse.to_json( + space.ListSpacesResponse() ) - request = attachment.UploadAttachmentRequest() + request = space.ListSpacesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = attachment.UploadAttachmentResponse() + post.return_value = space.ListSpacesResponse() - client.upload_attachment( + client.list_spaces( request, metadata=[ ("key", "val"), @@ -16069,8 +16666,8 @@ 
def test_upload_attachment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_upload_attachment_rest_bad_request( - transport: str = "rest", request_type=attachment.UploadAttachmentRequest +def test_list_spaces_rest_bad_request( + transport: str = "rest", request_type=space.ListSpacesRequest ): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16078,7 +16675,7 @@ def test_upload_attachment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "spaces/sample1"} + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16090,23 +16687,78 @@ def test_upload_attachment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.upload_attachment(request) + client.list_spaces(request) -def test_upload_attachment_rest_error(): +def test_list_spaces_rest_pager(transport: str = "rest"): client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + space.ListSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.ListSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.ListSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.ListSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(space.ListSpacesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_spaces(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, space.Space) for i in results) + + pages = list(client.list_spaces(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - space.ListSpacesRequest, + space.SearchSpacesRequest, dict, ], ) -def test_list_spaces_rest(request_type): +def test_search_spaces_rest(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16119,27 +16771,29 @@ def test_list_spaces_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = space.ListSpacesResponse( + return_value = space.SearchSpacesResponse( next_page_token="next_page_token_value", + total_size=1086, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = space.ListSpacesResponse.pb(return_value) + return_value = space.SearchSpacesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_spaces(request) + response = client.search_spaces(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSpacesPager) + assert isinstance(response, pagers.SearchSpacesPager) assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 -def test_list_spaces_rest_use_cached_wrapped_rpc(): +def test_search_spaces_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16153,30 +16807,141 @@ def test_list_spaces_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_spaces in client._transport._wrapped_methods + assert client._transport.search_spaces in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_spaces] = mock_rpc + client._transport._wrapped_methods[client._transport.search_spaces] = mock_rpc request = {} - client.list_spaces(request) + client.search_spaces(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_spaces(request) + client.search_spaces(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +def test_search_spaces_rest_required_fields(request_type=space.SearchSpacesRequest): + transport_class = transports.ChatServiceRestTransport + + request_init = {} + request_init["query"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "query" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_spaces._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "query" in jsonified_request + assert jsonified_request["query"] == request_init["query"] + + jsonified_request["query"] = "query_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_spaces._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "order_by", + "page_size", + "page_token", + "query", + "use_admin_access", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "query" in jsonified_request + assert jsonified_request["query"] == "query_value" + + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = space.SearchSpacesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = space.SearchSpacesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.search_spaces(request) + + expected_params = [ + ( + "query", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_search_spaces_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.search_spaces._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "orderBy", + "pageSize", + "pageToken", + "query", + "useAdminAccess", + ) + ) + & set(("query",)) + ) + + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_spaces_rest_interceptors(null_interceptor): +def test_search_spaces_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16189,13 +16954,13 @@ def test_list_spaces_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as 
transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_list_spaces" + transports.ChatServiceRestInterceptor, "post_search_spaces" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_list_spaces" + transports.ChatServiceRestInterceptor, "pre_search_spaces" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = space.ListSpacesRequest.pb(space.ListSpacesRequest()) + pb_message = space.SearchSpacesRequest.pb(space.SearchSpacesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16206,19 +16971,19 @@ def test_list_spaces_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = space.ListSpacesResponse.to_json( - space.ListSpacesResponse() + req.return_value._content = space.SearchSpacesResponse.to_json( + space.SearchSpacesResponse() ) - request = space.ListSpacesRequest() + request = space.SearchSpacesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = space.ListSpacesResponse() + post.return_value = space.SearchSpacesResponse() - client.list_spaces( + client.search_spaces( request, metadata=[ ("key", "val"), @@ -16230,8 +16995,8 @@ def test_list_spaces_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_spaces_rest_bad_request( - transport: str = "rest", request_type=space.ListSpacesRequest +def test_search_spaces_rest_bad_request( + transport: str = "rest", request_type=space.SearchSpacesRequest ): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16251,10 +17016,10 @@ def test_list_spaces_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_spaces(request) + client.search_spaces(request) -def test_list_spaces_rest_pager(transport: str = "rest"): +def 
test_search_spaces_rest_pager(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16266,7 +17031,7 @@ def test_list_spaces_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), space.Space(), @@ -16274,17 +17039,17 @@ def test_list_spaces_rest_pager(transport: str = "rest"): ], next_page_token="abc", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[], next_page_token="def", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), ], next_page_token="ghi", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), space.Space(), @@ -16295,7 +17060,7 @@ def test_list_spaces_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(space.ListSpacesResponse.to_json(x) for x in response) + response = tuple(space.SearchSpacesResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") @@ -16304,13 +17069,13 @@ def test_list_spaces_rest_pager(transport: str = "rest"): sample_request = {} - pager = client.list_spaces(request=sample_request) + pager = client.search_spaces(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, space.Space) for i in results) - pages = list(client.list_spaces(request=sample_request).pages) + pages = list(client.search_spaces(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -16441,6 +17206,8 @@ def test_get_space_rest_required_fields(request_type=space.GetSpaceRequest): 
unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).get_space._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -16494,7 +17261,7 @@ def test_get_space_rest_unset_required_fields(): ) unset_fields = transport.get_space._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -16666,7 +17433,12 @@ def test_create_space_rest(request_type): "space_history_state": 1, "import_mode": True, "create_time": {"seconds": 751, "nanos": 543}, + "last_active_time": {}, "admin_installed": True, + "membership_count": { + "joined_direct_human_user_count": 3185, + "joined_group_count": 1933, + }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", } @@ -17330,7 +18102,12 @@ def test_update_space_rest(request_type): "space_history_state": 1, "import_mode": True, "create_time": {"seconds": 751, "nanos": 543}, + "last_active_time": {}, "admin_installed": True, + "membership_count": { + "joined_direct_human_user_count": 3185, + "joined_group_count": 1933, + }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", } @@ -17510,7 +18287,12 @@ def test_update_space_rest_required_fields(request_type=gc_space.UpdateSpaceRequ credentials=ga_credentials.AnonymousCredentials() ).update_space._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set( + ( + "update_mask", + "use_admin_access", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -17563,7 +18345,15 @@ def test_update_space_rest_unset_required_fields(): ) unset_fields = transport.update_space._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("space",))) + assert set(unset_fields) == ( + set( + ( + "updateMask", + "useAdminAccess", + ) + ) + & set(("space",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -17802,6 +18592,8 @@ def test_delete_space_rest_required_fields(request_type=space.DeleteSpaceRequest unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).delete_space._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -17852,7 +18644,7 @@ def test_delete_space_rest_unset_required_fields(): ) unset_fields = transport.delete_space._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -18707,6 +19499,8 @@ def test_create_membership_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).create_membership._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -18762,7 +19556,7 @@ def test_create_membership_rest_unset_required_fields(): unset_fields = transport.create_membership._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("useAdminAccess",)) & set( ( "parent", @@ -19105,7 +19899,12 @@ def test_update_membership_rest_required_fields( credentials=ga_credentials.AnonymousCredentials() ).update_membership._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set( + ( + "update_mask", + "use_admin_access", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -19159,7 +19958,12 @@ def test_update_membership_rest_unset_required_fields(): unset_fields = transport.update_membership._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("updateMask",)) + set( + ( + "updateMask", + "useAdminAccess", + ) + ) & set( ( "membership", @@ -19423,6 +20227,8 @@ def test_delete_membership_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).delete_membership._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -19476,7 +20282,7 @@ def test_delete_membership_rest_unset_required_fields(): ) unset_fields = transport.delete_membership._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -22529,6 +23335,7 @@ def test_chat_service_base_transport(): "get_attachment", "upload_attachment", "list_spaces", + "search_spaces", "get_space", "create_space", "set_up_space", @@ -22922,6 +23729,9 @@ def test_chat_service_client_transport_session_collision(transport_name): session1 = client1.transport.list_spaces._session session2 = client2.transport.list_spaces._session assert session1 != session2 + session1 = client1.transport.search_spaces._session + session2 = client2.transport.search_spaces._session + assert session1 != session2 session1 = client1.transport.get_space._session session2 = client2.transport.get_space._session assert session1 != session2 From c03c4411287ee195fd5c99aff94d812381a908f3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 14 Sep 2024 01:08:32 +0000 Subject: [PATCH 18/59] fix!: [google-cloud-cloudcontrolspartner] Field behavior for field `display_name` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed (#13085) - [ ] Regenerate this pull request now. 
BEGIN_COMMIT_OVERRIDE fix!: [google-cloud-cloudcontrolspartner] Field behavior for field display_name in message .google.cloud.cloudcontrolspartner.v1beta.Customer is changed feat: Field behavior for field `customer_onboarding_state` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed feat: Field behavior for field `is_onboarded` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed feat: A new value `ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER` is added to enum `.google.cloud.cloudcontrolspartner.v1beta.PartnerPermissions.Permission` docs: A comment for field `display_name` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed END_COMMIT_OVERRIDE feat: Field behavior for field `customer_onboarding_state` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed feat: Field behavior for field `is_onboarded` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed feat: A new value `ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER` is added to enum `.google.cloud.cloudcontrolspartner.v1beta.PartnerPermissions.Permission` docs: A comment for field `display_name` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed PiperOrigin-RevId: 674282504 Source-Link: https://github.com/googleapis/googleapis/commit/ffbe78335c0b7efe1f0c7f44713e44fa30f7c7d9 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ee0f8c5be62f917333559bded6665302ec98b5c9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNsb3VkY29udHJvbHNwYXJ0bmVyLy5Pd2xCb3QueWFtbCIsImgiOiJlZTBmOGM1YmU2MmY5MTczMzM1NTliZGVkNjY2NTMwMmVjOThiNWM5In0= BEGIN_NESTED_COMMIT fix!: [google-cloud-cloudcontrolspartner] Field behavior for field `display_name` in message `.google.cloud.cloudcontrolspartner.v1.Customer` is changed feat: Field behavior for field `customer_onboarding_state` in message `.google.cloud.cloudcontrolspartner.v1.Customer` is changed feat: Field behavior for field `is_onboarded` in 
message `.google.cloud.cloudcontrolspartner.v1.Customer` is changed feat: A new value `ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER` is added to enum `.google.cloud.cloudcontrolspartner.v1.PartnerPermissions.Permission` docs: A comment for field `display_name` in message `.google.cloud.cloudcontrolspartner.v1.Customer` is changed PiperOrigin-RevId: 674282173 Source-Link: https://github.com/googleapis/googleapis/commit/9ebde5402abfe4014e63f3a9bb45c206a2a66f32 Source-Link: https://github.com/googleapis/googleapis-gen/commit/256435db38ff3a1d6d48b175058758b73b8d07a5 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNsb3VkY29udHJvbHNwYXJ0bmVyLy5Pd2xCb3QueWFtbCIsImgiOiIyNTY0MzVkYjM4ZmYzYTFkNmQ0OGIxNzUwNTg3NThiNzNiOGQwN2E1In0= END_NESTED_COMMIT --------- Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky --- .../cloud/cloudcontrolspartner_v1/types/customers.py | 10 +++++----- .../types/partner_permissions.py | 4 ++++ .../cloudcontrolspartner_v1beta/types/customers.py | 10 +++++----- .../types/partner_permissions.py | 4 ++++ 4 files changed, 18 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/customers.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/customers.py index 62eee778fdaf..917b2c256294 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/customers.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/customers.py @@ -45,13 +45,13 @@ class Customer(proto.Message): Identifier. Format: ``organizations/{organization}/locations/{location}/customers/{customer}`` display_name (str): - The customer organization's display name. - E.g. "google.com". + Required. Display name for the customer customer_onboarding_state (google.cloud.cloudcontrolspartner_v1.types.CustomerOnboardingState): - Container for customer onboarding steps + Output only. 
Container for customer + onboarding steps is_onboarded (bool): - Indicates whether a customer is fully - onboarded + Output only. Indicates whether a customer is + fully onboarded """ name: str = proto.Field( diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/partner_permissions.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/partner_permissions.py index 072b279e0861..a9c4f2513124 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/partner_permissions.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/partner_permissions.py @@ -57,12 +57,16 @@ class Permission(proto.Enum): ASSURED_WORKLOADS_EKM_CONNECTION_STATUS (4): Permission for External Key Manager connection status + ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER (5): + Permission for support case details for + Access Transparency log entries """ PERMISSION_UNSPECIFIED = 0 ACCESS_TRANSPARENCY_AND_EMERGENCY_ACCESS_LOGS = 1 ASSURED_WORKLOADS_MONITORING = 2 ACCESS_APPROVAL_REQUESTS = 3 ASSURED_WORKLOADS_EKM_CONNECTION_STATUS = 4 + ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER = 5 name: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py index dae25231d4f0..2237867d884f 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py @@ -45,13 +45,13 @@ class Customer(proto.Message): Identifier. Format: ``organizations/{organization}/locations/{location}/customers/{customer}`` display_name (str): - The customer organization's display name. - E.g. "google.com". + Required. 
Display name for the customer customer_onboarding_state (google.cloud.cloudcontrolspartner_v1beta.types.CustomerOnboardingState): - Container for customer onboarding steps + Output only. Container for customer + onboarding steps is_onboarded (bool): - Indicates whether a customer is fully - onboarded + Output only. Indicates whether a customer is + fully onboarded """ name: str = proto.Field( diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py index d94dff633d35..eddc0cf9ab95 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py @@ -57,12 +57,16 @@ class Permission(proto.Enum): ASSURED_WORKLOADS_EKM_CONNECTION_STATUS (4): Permission for External Key Manager connection status + ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER (5): + Permission for support case details for + Access Transparency log entries """ PERMISSION_UNSPECIFIED = 0 ACCESS_TRANSPARENCY_AND_EMERGENCY_ACCESS_LOGS = 1 ASSURED_WORKLOADS_MONITORING = 2 ACCESS_APPROVAL_REQUESTS = 3 ASSURED_WORKLOADS_EKM_CONNECTION_STATUS = 4 + ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER = 5 name: str = proto.Field( proto.STRING, From 366f6f10e29a9d9cc307cbd1f16deb4decf26050 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 14 Sep 2024 01:10:19 +0000 Subject: [PATCH 19/59] feat: [google-maps-routeoptimization] minor fields and documentation update (#13083) - [ ] Regenerate this pull request now. 
BEGIN_COMMIT_OVERRIDE feat: [google-maps-routeoptimization] minor fields and documentation update feat: A new message `RouteModifiers` is added feat: A new field `route_modifiers` is added to message `.google.maps.routeoptimization.v1.Vehicle` feat: A new field `cost_per_kilometer_below_soft_max` is added to message `.google.maps.routeoptimization.v1.DistanceLimit` docs: A comment for method `BatchOptimizeTours` in service `RouteOptimization` is changed docs: A comment for enum value `DEFAULT_SOLVE` in enum `SolvingMode` is changed docs: A comment for field `validation_errors` in message `.google.maps.routeoptimization.v1.OptimizeToursResponse` is changed docs: A comment for message `TimeWindow` is changed docs: A comment for enum value `CODE_UNSPECIFIED` in enum `Code` is changed docs: A comment for field `reasons` in message `.google.maps.routeoptimization.v1.SkippedShipment` is changed docs: A comment for enum value `RELAX_VISIT_TIMES_AND_SEQUENCE_AFTER_THRESHOLD` in enum `Level` is changed docs: A comment for message `OptimizeToursValidationError` is changed docs: A comment for field `code` in message `.google.maps.routeoptimization.v1.OptimizeToursValidationError` is changed END_COMMIT_OVERRIDE feat: A new message `RouteModifiers` is added feat: A new field `route_modifiers` is added to message `.google.maps.routeoptimization.v1.Vehicle` feat: A new field `cost_per_kilometer_below_soft_max` is added to message `.google.maps.routeoptimization.v1.DistanceLimit` docs: A comment for method `BatchOptimizeTours` in service `RouteOptimization` is changed docs: A comment for enum value `DEFAULT_SOLVE` in enum `SolvingMode` is changed docs: A comment for field `validation_errors` in message `.google.maps.routeoptimization.v1.OptimizeToursResponse` is changed docs: A comment for message `TimeWindow` is changed docs: A comment for enum value `CODE_UNSPECIFIED` in enum `Code` is changed docs: A comment for field `reasons` in message 
`.google.maps.routeoptimization.v1.SkippedShipment` is changed docs: A comment for enum value `RELAX_VISIT_TIMES_AND_SEQUENCE_AFTER_THRESHOLD` in enum `Level` is changed docs: A comment for message `OptimizeToursValidationError` is changed docs: A comment for field `code` in message `.google.maps.routeoptimization.v1.OptimizeToursValidationError` is changed PiperOrigin-RevId: 674021227 Source-Link: https://github.com/googleapis/googleapis/commit/a83432038474bbff69f79a4360a60330bf7eaaa9 Source-Link: https://github.com/googleapis/googleapis-gen/commit/13dfeeb21d7dd16c6f80f7196e07a2a2ace35eb4 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcHMtcm91dGVvcHRpbWl6YXRpb24vLk93bEJvdC55YW1sIiwiaCI6IjEzZGZlZWIyMWQ3ZGQxNmM2ZjgwZjcxOTZlMDdhMmEyYWNlMzVlYjQifQ== --------- Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky --- .../google/maps/routeoptimization/__init__.py | 2 + .../maps/routeoptimization_v1/__init__.py | 2 + .../route_optimization/async_client.py | 13 ++ .../services/route_optimization/client.py | 13 ++ .../route_optimization/transports/grpc.py | 13 ++ .../transports/grpc_asyncio.py | 13 ++ .../routeoptimization_v1/types/__init__.py | 2 + .../types/route_optimization_service.py | 122 ++++++++++++++++-- 8 files changed, 167 insertions(+), 13 deletions(-) diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization/__init__.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization/__init__.py index 185073f9abae..c2d0f75734bc 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization/__init__.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization/__init__.py @@ -41,6 +41,7 @@ OptimizeToursResponse, OptimizeToursValidationError, OutputConfig, + RouteModifiers, Shipment, ShipmentModel, ShipmentRoute, @@ -71,6 +72,7 @@ "OptimizeToursResponse", "OptimizeToursValidationError", "OutputConfig", + "RouteModifiers", "Shipment", "ShipmentModel", "ShipmentRoute", diff --git 
a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/__init__.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/__init__.py index 053f46b064a7..6f3ede719590 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/__init__.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/__init__.py @@ -39,6 +39,7 @@ OptimizeToursResponse, OptimizeToursValidationError, OutputConfig, + RouteModifiers, Shipment, ShipmentModel, ShipmentRoute, @@ -69,6 +70,7 @@ "OptimizeToursResponse", "OptimizeToursValidationError", "OutputConfig", + "RouteModifiers", "RouteOptimizationClient", "Shipment", "ShipmentModel", diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py index a87f03648831..3c899f1f772a 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py @@ -410,6 +410,19 @@ async def batch_optimize_tours( containing ``ShipmentRoute``\ s, which are a set of routes to be performed by vehicles minimizing the overall cost. + The user can poll ``operations.get`` to check the status of the + LRO: + + If the LRO's ``done`` field is false, then at least one request + is still being processed. Other requests may have completed + successfully and their results are available in GCS. + + If the LRO's ``done`` field is true, then all requests have been + processed. Any successfully processed requests will have their + results available in GCS. Any requests that failed will not have + their results available in GCS. If the LRO's ``error`` field is + set, then it contains the error from one of the failed requests. + .. 
code-block:: python # This snippet has been automatically generated and should be regarded as a diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py index ba2242e7e73f..c88ee1b4892f 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py @@ -812,6 +812,19 @@ def batch_optimize_tours( containing ``ShipmentRoute``\ s, which are a set of routes to be performed by vehicles minimizing the overall cost. + The user can poll ``operations.get`` to check the status of the + LRO: + + If the LRO's ``done`` field is false, then at least one request + is still being processed. Other requests may have completed + successfully and their results are available in GCS. + + If the LRO's ``done`` field is true, then all requests have been + processed. Any successfully processed requests will have their + results available in GCS. Any requests that failed will not have + their results available in GCS. If the LRO's ``error`` field is + set, then it contains the error from one of the failed requests. + .. 
code-block:: python # This snippet has been automatically generated and should be regarded as a diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc.py index 33cdb5684e3d..8c5621a5f0ba 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc.py @@ -336,6 +336,19 @@ def batch_optimize_tours( containing ``ShipmentRoute``\ s, which are a set of routes to be performed by vehicles minimizing the overall cost. + The user can poll ``operations.get`` to check the status of the + LRO: + + If the LRO's ``done`` field is false, then at least one request + is still being processed. Other requests may have completed + successfully and their results are available in GCS. + + If the LRO's ``done`` field is true, then all requests have been + processed. Any successfully processed requests will have their + results available in GCS. Any requests that failed will not have + their results available in GCS. If the LRO's ``error`` field is + set, then it contains the error from one of the failed requests. 
+ Returns: Callable[[~.BatchOptimizeToursRequest], ~.Operation]: diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc_asyncio.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc_asyncio.py index 0ebbbbf4c6e8..edf1e396fe6f 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc_asyncio.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc_asyncio.py @@ -343,6 +343,19 @@ def batch_optimize_tours( containing ``ShipmentRoute``\ s, which are a set of routes to be performed by vehicles minimizing the overall cost. + The user can poll ``operations.get`` to check the status of the + LRO: + + If the LRO's ``done`` field is false, then at least one request + is still being processed. Other requests may have completed + successfully and their results are available in GCS. + + If the LRO's ``done`` field is true, then all requests have been + processed. Any successfully processed requests will have their + results available in GCS. Any requests that failed will not have + their results available in GCS. If the LRO's ``error`` field is + set, then it contains the error from one of the failed requests. 
+ Returns: Callable[[~.BatchOptimizeToursRequest], Awaitable[~.Operation]]: diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/__init__.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/__init__.py index 88b09e715f9d..fbcd717dca6f 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/__init__.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/__init__.py @@ -30,6 +30,7 @@ OptimizeToursResponse, OptimizeToursValidationError, OutputConfig, + RouteModifiers, Shipment, ShipmentModel, ShipmentRoute, @@ -58,6 +59,7 @@ "OptimizeToursResponse", "OptimizeToursValidationError", "OutputConfig", + "RouteModifiers", "Shipment", "ShipmentModel", "ShipmentRoute", diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/route_optimization_service.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/route_optimization_service.py index f83a834bca15..0dd1de2b1f9d 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/route_optimization_service.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/route_optimization_service.py @@ -35,6 +35,7 @@ "Shipment", "ShipmentTypeIncompatibility", "ShipmentTypeRequirement", + "RouteModifiers", "Vehicle", "TimeWindow", "DistanceLimit", @@ -382,7 +383,8 @@ class SolvingMode(proto.Enum): Values: DEFAULT_SOLVE (0): - Solve the model. + Solve the model. Warnings may be issued in + [OptimizeToursResponse.validation_errors][google.cloud.optimization.v1.OptimizeToursResponse.validation_errors]. VALIDATE_ONLY (1): Only validates the model without solving it: populates as many @@ -531,7 +533,8 @@ class OptimizeToursResponse(proto.Message): detect independently. 
See the "MULTIPLE ERRORS" explanation for the [OptimizeToursValidationError][google.maps.routeoptimization.v1.OptimizeToursValidationError] - message. + message. Instead of errors, this will include warnings in + the case ``solving_mode`` is ``DEFAULT_SOLVE``. metrics (google.maps.routeoptimization_v1.types.OptimizeToursResponse.Metrics): Duration, distance and usage metrics for this solution. @@ -1524,6 +1527,53 @@ class RequirementMode(proto.Enum): ) +class RouteModifiers(proto.Message): + r"""Encapsulates a set of optional conditions to satisfy when + calculating vehicle routes. This is similar to ``RouteModifiers`` in + the Google Maps Platform Routes Preferred API; see: + https://developers.google.com/maps/documentation/routes/reference/rest/v2/RouteModifiers. + + Attributes: + avoid_tolls (bool): + Specifies whether to avoid toll roads where + reasonable. Preference will be given to routes + not containing toll roads. Applies only to + motorized travel modes. + avoid_highways (bool): + Specifies whether to avoid highways where + reasonable. Preference will be given to routes + not containing highways. Applies only to + motorized travel modes. + avoid_ferries (bool): + Specifies whether to avoid ferries where + reasonable. Preference will be given to routes + not containing travel by ferries. Applies only + to motorized travel modes. + avoid_indoor (bool): + Optional. Specifies whether to avoid navigating indoors + where reasonable. Preference will be given to routes not + containing indoor navigation. Applies only to the + ``WALKING`` travel mode. + """ + + avoid_tolls: bool = proto.Field( + proto.BOOL, + number=2, + ) + avoid_highways: bool = proto.Field( + proto.BOOL, + number=3, + ) + avoid_ferries: bool = proto.Field( + proto.BOOL, + number=4, + ) + avoid_indoor: bool = proto.Field( + proto.BOOL, + number=5, + ) + + class Vehicle(proto.Message): r"""Models a vehicle in a shipment problem. 
Solving a shipment problem will build a route starting from ``start_location`` and ending at @@ -1542,6 +1592,10 @@ class Vehicle(proto.Message): The travel mode which affects the roads usable by the vehicle and its speed. See also ``travel_duration_multiple``. + route_modifiers (google.maps.routeoptimization_v1.types.RouteModifiers): + A set of conditions to satisfy that affect + the way routes are calculated for the given + vehicle. start_location (google.type.latlng_pb2.LatLng): Geographic location where the vehicle starts before picking up any shipments. If not specified, the vehicle starts at @@ -1964,6 +2018,11 @@ class DurationLimit(proto.Message): number=1, enum=TravelMode, ) + route_modifiers: "RouteModifiers" = proto.Field( + proto.MESSAGE, + number=2, + message="RouteModifiers", + ) start_location: latlng_pb2.LatLng = proto.Field( proto.MESSAGE, number=3, @@ -2098,8 +2157,9 @@ class TimeWindow(proto.Message): :: - 0 <= `start_time` <= `soft_start_time` <= `end_time` and - 0 <= `start_time` <= `soft_end_time` <= `end_time`. + 0 <= `start_time` <= `end_time` and + 0 <= `start_time` <= `soft_start_time` and + 0 <= `soft_end_time` <= `end_time`. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -2203,6 +2263,18 @@ class DistanceLimit(proto.Message): must be nonnegative. This field is a member of `oneof`_ ``_soft_max_meters``. + cost_per_kilometer_below_soft_max (float): + Cost per kilometer incurred, increasing up to + ``soft_max_meters``, with formula: + + :: + + min(distance_meters, soft_max_meters) / 1000.0 * + cost_per_kilometer_below_soft_max. + + This cost is not supported in ``route_distance_limit``. + + This field is a member of `oneof`_ ``_cost_per_kilometer_below_soft_max``. cost_per_kilometer_above_soft_max (float): Cost per kilometer incurred if distance is above ``soft_max_meters`` limit. 
The additional cost is 0 if the @@ -2229,6 +2301,11 @@ class DistanceLimit(proto.Message): number=2, optional=True, ) + cost_per_kilometer_below_soft_max: float = proto.Field( + proto.DOUBLE, + number=4, + optional=True, + ) cost_per_kilometer_above_soft_max: float = proto.Field( proto.DOUBLE, number=3, @@ -3056,7 +3133,8 @@ class SkippedShipment(proto.Message): if specified in the ``Shipment``. reasons (MutableSequence[google.maps.routeoptimization_v1.types.SkippedShipment.Reason]): A list of reasons that explain why the shipment was skipped. - See comment above ``Reason``. + See comment above ``Reason``. If we are unable to understand + why a shipment was skipped, reasons will not be set. """ class Reason(proto.Message): @@ -3115,9 +3193,7 @@ class Code(proto.Enum): Values: CODE_UNSPECIFIED (0): - This should never be used. If we are unable - to understand why a shipment was skipped, we - simply return an empty set of reasons. + This should never be used. NO_VEHICLE (1): There is no vehicle in the model making all shipments infeasible. @@ -3420,8 +3496,8 @@ class Level(proto.Enum): or before them. RELAX_VISIT_TIMES_AND_SEQUENCE_AFTER_THRESHOLD (2): Same as ``RELAX_VISIT_TIMES_AFTER_THRESHOLD``, but the visit - sequence is also relaxed: visits remain simply bound to - their vehicle. + sequence is also relaxed: visits can only be performed by + this vehicle, but can potentially become unperformed. RELAX_ALL_AFTER_THRESHOLD (3): Same as ``RELAX_VISIT_TIMES_AND_SEQUENCE_AFTER_THRESHOLD``, but the vehicle is also relaxed: visits are completely free @@ -3478,7 +3554,7 @@ class Level(proto.Enum): class OptimizeToursValidationError(proto.Message): - r"""Describes an error encountered when validating an + r"""Describes an error or warning encountered when validating an ``OptimizeToursRequest``. 
Attributes: @@ -3592,8 +3668,10 @@ class OptimizeToursValidationError(proto.Message): - TIME_WINDOW_OVERLAPPING_ADJACENT_OR_EARLIER_THAN_PREVIOUS = 2812; - TIME_WINDOW_START_TIME_AFTER_SOFT_START_TIME = 2813; - - TIME_WINDOW_SOFT_START_TIME_AFTER_END_TIME = 2814; - - TIME_WINDOW_START_TIME_AFTER_SOFT_END_TIME = 2815; + - TIME_WINDOW_SOFT_START_TIME_OUTSIDE_GLOBAL_TIME_WINDOW + = 2819; + - TIME_WINDOW_SOFT_END_TIME_OUTSIDE_GLOBAL_TIME_WINDOW = + 2820; - TIME_WINDOW_SOFT_END_TIME_AFTER_END_TIME = 2816; - TIME_WINDOW_COST_BEFORE_SOFT_START_TIME_SET_AND_MULTIPLE_WINDOWS = 2817; @@ -3754,6 +3832,15 @@ class OptimizeToursValidationError(proto.Message): - PRECEDENCE_ERROR = 46; + - PRECEDENCE_RULE_MISSING_FIRST_INDEX = 4600; + - PRECEDENCE_RULE_MISSING_SECOND_INDEX = 4601; + - PRECEDENCE_RULE_FIRST_INDEX_OUT_OF_BOUNDS = 4602; + - PRECEDENCE_RULE_SECOND_INDEX_OUT_OF_BOUNDS = 4603; + - PRECEDENCE_RULE_DUPLICATE_INDEX = 4604; + - PRECEDENCE_RULE_INEXISTENT_FIRST_VISIT_REQUEST = 4605; + - PRECEDENCE_RULE_INEXISTENT_SECOND_VISIT_REQUEST = + 4606; + - BREAK_ERROR = 48; - BREAK_RULE_EMPTY = 4800; @@ -3826,6 +3913,15 @@ class OptimizeToursValidationError(proto.Message): 5600; - DURATION_SECONDS_MATRIX_DURATION_EXCEEDS_GLOBAL_DURATION = 5601; + + - WARNING = 9; + + - WARNING_INJECTED_FIRST_SOLUTION = 90; + + - WARNING_INJECTED_FIRST_SOLUTION_INFEASIBLE_SHIPMENTS_REMOVED + = 9000; + - WARNING_INJECTED_FIRST_SOLUTION_INFEASIBLE_AFTER_GETTING_TRAVEL_TIMES + = 9001; display_name (str): The error display name. 
fields (MutableSequence[google.maps.routeoptimization_v1.types.OptimizeToursValidationError.FieldReference]): From 5e3f4aebeb2f79efb1992ae623eb1aea86de2b0c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 14 Sep 2024 01:17:57 +0000 Subject: [PATCH 20/59] feat(api): [google-cloud-netapp] A new rpc 'SwitchActiveReplicaZone' is added to service 'google.cloud.netapp.v1.NetApp' (#13086) - [ ] Regenerate this pull request now. BEGIN_COMMIT_OVERRIDE feat(api): [google-cloud-netapp] A new rpc 'SwitchActiveReplicaZone' is added to service 'google.cloud.netapp.v1.NetApp' feat: A new message 'google.cloud.netapp.v1.SwitchActiveReplicaZoneRequest' is added feat: A new field 'allow_auto_tiering' in message 'google.cloud.netapp.v1.StoragePool' is added feat: A new field 'cold_tier_size_gib' in message 'google.cloud.netapp.v1.Volume' is added END_COMMIT_OVERRIDE feat: A new message 'google.cloud.netapp.v1.SwitchActiveReplicaZoneRequest' is added feat: A new field 'allow_auto_tiering' in message 'google.cloud.netapp.v1.StoragePool' is added feat: A new field 'cold_tier_size_gib' in message 'google.cloud.netapp.v1.Volume' is added PiperOrigin-RevId: 674440910 Source-Link: https://github.com/googleapis/googleapis/commit/e99de3d3cc1144ad2af19f5e142a1c59f77f2bd3 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ca2b9f634b559466682d84b12d0aeb9b74ebdba2 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW5ldGFwcC8uT3dsQm90LnlhbWwiLCJoIjoiY2EyYjlmNjM0YjU1OTQ2NjY4MmQ4NGIxMmQwYWViOWI3NGViZGJhMiJ9 --------- Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky --- .../google/cloud/netapp/__init__.py | 2 + .../google/cloud/netapp_v1/__init__.py | 2 + .../cloud/netapp_v1/gapic_metadata.json | 15 + .../services/net_app/async_client.py | 104 ++++ .../netapp_v1/services/net_app/client.py | 104 ++++ .../services/net_app/transports/base.py | 14 + .../services/net_app/transports/grpc.py | 29 + 
.../net_app/transports/grpc_asyncio.py | 35 ++ .../services/net_app/transports/rest.py | 139 +++++ .../google/cloud/netapp_v1/types/__init__.py | 2 + .../cloud/netapp_v1/types/storage_pool.py | 25 + .../google/cloud/netapp_v1/types/volume.py | 7 + ...et_app_switch_active_replica_zone_async.py | 56 ++ ...net_app_switch_active_replica_zone_sync.py | 56 ++ ...ippet_metadata_google.cloud.netapp.v1.json | 153 +++++ .../scripts/fixup_netapp_v1_keywords.py | 1 + .../unit/gapic/netapp_v1/test_net_app.py | 575 ++++++++++++++++++ 17 files changed, 1319 insertions(+) create mode 100644 packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_switch_active_replica_zone_async.py create mode 100644 packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_switch_active_replica_zone_sync.py diff --git a/packages/google-cloud-netapp/google/cloud/netapp/__init__.py b/packages/google-cloud-netapp/google/cloud/netapp/__init__.py index 911af7583e73..66ff5c729847 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp/__init__.py +++ b/packages/google-cloud-netapp/google/cloud/netapp/__init__.py @@ -104,6 +104,7 @@ ListStoragePoolsRequest, ListStoragePoolsResponse, StoragePool, + SwitchActiveReplicaZoneRequest, UpdateStoragePoolRequest, ) from google.cloud.netapp_v1.types.volume import ( @@ -203,6 +204,7 @@ "ListStoragePoolsRequest", "ListStoragePoolsResponse", "StoragePool", + "SwitchActiveReplicaZoneRequest", "UpdateStoragePoolRequest", "BackupConfig", "CreateVolumeRequest", diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py index e55949c3cc22..6e842284502b 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py @@ -99,6 +99,7 @@ ListStoragePoolsRequest, ListStoragePoolsResponse, StoragePool, + SwitchActiveReplicaZoneRequest, 
UpdateStoragePoolRequest, ) from .types.volume import ( @@ -208,6 +209,7 @@ "SnapshotPolicy", "StopReplicationRequest", "StoragePool", + "SwitchActiveReplicaZoneRequest", "TieringPolicy", "TransferStats", "UpdateActiveDirectoryRequest", diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_metadata.json b/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_metadata.json index ca240ed3ab2f..cfe1b93cc977 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_metadata.json +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_metadata.json @@ -215,6 +215,11 @@ "stop_replication" ] }, + "SwitchActiveReplicaZone": { + "methods": [ + "switch_active_replica_zone" + ] + }, "UpdateActiveDirectory": { "methods": [ "update_active_directory" @@ -475,6 +480,11 @@ "stop_replication" ] }, + "SwitchActiveReplicaZone": { + "methods": [ + "switch_active_replica_zone" + ] + }, "UpdateActiveDirectory": { "methods": [ "update_active_directory" @@ -735,6 +745,11 @@ "stop_replication" ] }, + "SwitchActiveReplicaZone": { + "methods": [ + "switch_active_replica_zone" + ] + }, "UpdateActiveDirectory": { "methods": [ "update_active_directory" diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py index a1bd9a72ca72..4a46db735693 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py @@ -941,6 +941,110 @@ async def sample_delete_storage_pool(): # Done; return the response. 
return response + async def switch_active_replica_zone( + self, + request: Optional[ + Union[storage_pool.SwitchActiveReplicaZoneRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""This operation will switch the active/replica zone + for a regional storagePool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_switch_active_replica_zone(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.SwitchActiveReplicaZoneRequest( + name="name_value", + ) + + # Make the request + operation = client.switch_active_replica_zone(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.SwitchActiveReplicaZoneRequest, dict]]): + The request object. SwitchActiveReplicaZoneRequest switch + the active/replica zone for a regional + storagePool. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.netapp_v1.types.StoragePool` StoragePool is a container for volumes with a service level and capacity. + Volumes can be created in a pool of sufficient + available capacity. StoragePool capacity is what you + are billed for. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, storage_pool.SwitchActiveReplicaZoneRequest): + request = storage_pool.SwitchActiveReplicaZoneRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.switch_active_replica_zone + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + storage_pool.StoragePool, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + async def list_volumes( self, request: Optional[Union[volume.ListVolumesRequest, dict]] = None, diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py index 8d8dc7fd2ef6..23ea30de2f0e 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py @@ -1537,6 +1537,110 @@ def sample_delete_storage_pool(): # Done; return the response. return response + def switch_active_replica_zone( + self, + request: Optional[ + Union[storage_pool.SwitchActiveReplicaZoneRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""This operation will switch the active/replica zone + for a regional storagePool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_switch_active_replica_zone(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.SwitchActiveReplicaZoneRequest( + name="name_value", + ) + + # Make the request + operation = client.switch_active_replica_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.SwitchActiveReplicaZoneRequest, dict]): + The request object. SwitchActiveReplicaZoneRequest switch + the active/replica zone for a regional + storagePool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.netapp_v1.types.StoragePool` StoragePool is a container for volumes with a service level and capacity. + Volumes can be created in a pool of sufficient + available capacity. StoragePool capacity is what you + are billed for. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, storage_pool.SwitchActiveReplicaZoneRequest): + request = storage_pool.SwitchActiveReplicaZoneRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.switch_active_replica_zone + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + storage_pool.StoragePool, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + def list_volumes( self, request: Optional[Union[volume.ListVolumesRequest, dict]] = None, diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/base.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/base.py index 19de7acc8804..0609c5169e97 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/base.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/base.py @@ -189,6 +189,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.switch_active_replica_zone: gapic_v1.method.wrap_method( + self.switch_active_replica_zone, + default_timeout=None, + client_info=client_info, + ), self.list_volumes: gapic_v1.method.wrap_method( self.list_volumes, default_retry=retries.Retry( @@ -627,6 +632,15 @@ def delete_storage_pool( ]: raise NotImplementedError() + @property + def switch_active_replica_zone( + self, + ) -> Callable[ + [storage_pool.SwitchActiveReplicaZoneRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_volumes( self, diff --git 
a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc.py index 023b562b7012..37f1b85f9322 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc.py @@ -407,6 +407,35 @@ def delete_storage_pool( ) return self._stubs["delete_storage_pool"] + @property + def switch_active_replica_zone( + self, + ) -> Callable[ + [storage_pool.SwitchActiveReplicaZoneRequest], operations_pb2.Operation + ]: + r"""Return a callable for the switch active replica zone method over gRPC. + + This operation will switch the active/replica zone + for a regional storagePool. + + Returns: + Callable[[~.SwitchActiveReplicaZoneRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "switch_active_replica_zone" not in self._stubs: + self._stubs["switch_active_replica_zone"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/SwitchActiveReplicaZone", + request_serializer=storage_pool.SwitchActiveReplicaZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["switch_active_replica_zone"] + @property def list_volumes( self, diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc_asyncio.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc_asyncio.py index 933f69b45e91..fe94f2e42c7c 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc_asyncio.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc_asyncio.py @@ -418,6 +418,36 @@ def delete_storage_pool( ) return self._stubs["delete_storage_pool"] + @property + def switch_active_replica_zone( + self, + ) -> Callable[ + [storage_pool.SwitchActiveReplicaZoneRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the switch active replica zone method over gRPC. + + This operation will switch the active/replica zone + for a regional storagePool. + + Returns: + Callable[[~.SwitchActiveReplicaZoneRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "switch_active_replica_zone" not in self._stubs: + self._stubs["switch_active_replica_zone"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/SwitchActiveReplicaZone", + request_serializer=storage_pool.SwitchActiveReplicaZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["switch_active_replica_zone"] + @property def list_volumes( self, @@ -1753,6 +1783,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.switch_active_replica_zone: gapic_v1.method_async.wrap_method( + self.switch_active_replica_zone, + default_timeout=None, + client_info=client_info, + ), self.list_volumes: gapic_v1.method_async.wrap_method( self.list_volumes, default_retry=retries.AsyncRetry( diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest.py index 0a163bc67ea5..da0a85a7bcd9 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest.py @@ -416,6 +416,14 @@ def post_stop_replication(self, response): logging.log(f"Received response: {response}") return response + def pre_switch_active_replica_zone(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_switch_active_replica_zone(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_active_directory(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -1419,6 +1427,29 @@ def post_stop_replication( """ return response + def pre_switch_active_replica_zone( + self, + request: storage_pool.SwitchActiveReplicaZoneRequest, + metadata: Sequence[Tuple[str, str]], + ) -> 
Tuple[storage_pool.SwitchActiveReplicaZoneRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for switch_active_replica_zone + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. + """ + return request, metadata + + def post_switch_active_replica_zone( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for switch_active_replica_zone + + Override in a subclass to manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. + """ + return response + def pre_update_active_directory( self, request: gcn_active_directory.UpdateActiveDirectoryRequest, @@ -5680,6 +5711,104 @@ def __call__( resp = self._interceptor.post_stop_replication(resp) return resp + class _SwitchActiveReplicaZone(NetAppRestStub): + def __hash__(self): + return hash("SwitchActiveReplicaZone") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: storage_pool.SwitchActiveReplicaZoneRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the switch active replica + zone method over HTTP. + + Args: + request (~.storage_pool.SwitchActiveReplicaZoneRequest): + The request object. SwitchActiveReplicaZoneRequest switch + the active/replica zone for a regional + storagePool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/storagePools/*}:switch", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_switch_active_replica_zone( + request, metadata + ) + pb_request = storage_pool.SwitchActiveReplicaZoneRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_switch_active_replica_zone(resp) + return resp + class _UpdateActiveDirectory(NetAppRestStub): def __hash__(self): return hash("UpdateActiveDirectory") @@ -7004,6 +7133,16 @@ def stop_replication( # In C++ this would require a dynamic_cast return self._StopReplication(self._session, self._host, self._interceptor) # type: ignore + @property + def switch_active_replica_zone( + self, + ) -> Callable[ + [storage_pool.SwitchActiveReplicaZoneRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SwitchActiveReplicaZone(self._session, self._host, self._interceptor) # type: ignore + @property def update_active_directory( self, diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py index fbdb02042fae..c06102732661 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py @@ -93,6 +93,7 @@ ListStoragePoolsRequest, ListStoragePoolsResponse, StoragePool, + SwitchActiveReplicaZoneRequest, UpdateStoragePoolRequest, ) from .volume import ( @@ -190,6 +191,7 @@ "ListStoragePoolsRequest", "ListStoragePoolsResponse", "StoragePool", + "SwitchActiveReplicaZoneRequest", "UpdateStoragePoolRequest", "BackupConfig", "CreateVolumeRequest", diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py index e9687e66b7b4..f7815be021ac 100644 --- 
a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py @@ -32,6 +32,7 @@ "CreateStoragePoolRequest", "UpdateStoragePoolRequest", "DeleteStoragePoolRequest", + "SwitchActiveReplicaZoneRequest", "StoragePool", }, ) @@ -199,6 +200,21 @@ class DeleteStoragePoolRequest(proto.Message): ) +class SwitchActiveReplicaZoneRequest(proto.Message): + r"""SwitchActiveReplicaZoneRequest switch the active/replica zone + for a regional storagePool. + + Attributes: + name (str): + Required. Name of the storage pool + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class StoragePool(proto.Message): r"""StoragePool is a container for volumes with a service level and capacity. Volumes can be created in a pool of sufficient @@ -255,6 +271,11 @@ class StoragePool(proto.Message): AD or DNS server from other regions. This field is a member of `oneof`_ ``_global_access_allowed``. + allow_auto_tiering (bool): + Optional. True if the storage pool supports + Auto Tiering enabled volumes. Default is false. + Auto-tiering can be enabled after storage pool + creation but it can't be disabled once enabled. replica_zone (str): Optional. Specifies the replica zone for regional storagePool. @@ -367,6 +388,10 @@ class State(proto.Enum): number=17, optional=True, ) + allow_auto_tiering: bool = proto.Field( + proto.BOOL, + number=18, + ) replica_zone: str = proto.Field( proto.STRING, number=20, diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py index a10bd13bb922..2589c8149e04 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py @@ -466,6 +466,9 @@ class Volume(proto.Message): zone (str): Output only. Specifies the active zone for regional volume. 
+ cold_tier_size_gib (int): + Output only. Size of the volume cold tier + data in GiB. """ class State(proto.Enum): @@ -659,6 +662,10 @@ class State(proto.Enum): proto.STRING, number=37, ) + cold_tier_size_gib: int = proto.Field( + proto.INT64, + number=39, + ) class ExportPolicy(proto.Message): diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_switch_active_replica_zone_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_switch_active_replica_zone_async.py new file mode 100644 index 000000000000..bb3bed5a2e23 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_switch_active_replica_zone_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SwitchActiveReplicaZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_SwitchActiveReplicaZone_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_switch_active_replica_zone(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.SwitchActiveReplicaZoneRequest( + name="name_value", + ) + + # Make the request + operation = client.switch_active_replica_zone(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_SwitchActiveReplicaZone_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_switch_active_replica_zone_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_switch_active_replica_zone_sync.py new file mode 100644 index 000000000000..cf4ce4ac46c5 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_switch_active_replica_zone_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for SwitchActiveReplicaZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_SwitchActiveReplicaZone_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_switch_active_replica_zone(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.SwitchActiveReplicaZoneRequest( + name="name_value", + ) + + # Make the request + operation = client.switch_active_replica_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_SwitchActiveReplicaZone_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json b/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json index 3edfdb18757c..a43c86575bc2 100644 --- a/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json +++ b/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json @@ -6716,6 +6716,159 @@ ], "title": "netapp_v1_generated_net_app_stop_replication_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.switch_active_replica_zone", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.SwitchActiveReplicaZone", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "SwitchActiveReplicaZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.SwitchActiveReplicaZoneRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "switch_active_replica_zone" + }, + "description": "Sample for SwitchActiveReplicaZone", + "file": "netapp_v1_generated_net_app_switch_active_replica_zone_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_SwitchActiveReplicaZone_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_switch_active_replica_zone_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.switch_active_replica_zone", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.SwitchActiveReplicaZone", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": 
"SwitchActiveReplicaZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.SwitchActiveReplicaZoneRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "switch_active_replica_zone" + }, + "description": "Sample for SwitchActiveReplicaZone", + "file": "netapp_v1_generated_net_app_switch_active_replica_zone_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_SwitchActiveReplicaZone_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_switch_active_replica_zone_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-netapp/scripts/fixup_netapp_v1_keywords.py b/packages/google-cloud-netapp/scripts/fixup_netapp_v1_keywords.py index 3a50d5244cd5..c317695a165e 100644 --- a/packages/google-cloud-netapp/scripts/fixup_netapp_v1_keywords.py +++ b/packages/google-cloud-netapp/scripts/fixup_netapp_v1_keywords.py @@ -80,6 +80,7 @@ class netappCallTransformer(cst.CSTTransformer): 'reverse_replication_direction': ('name', ), 'revert_volume': ('name', 'snapshot_id', ), 'stop_replication': ('name', 'force', ), + 'switch_active_replica_zone': ('name', ), 'update_active_directory': ('update_mask', 'active_directory', ), 'update_backup': ('update_mask', 'backup', ), 'update_backup_policy': ('update_mask', 'backup_policy', ), diff --git 
a/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py b/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py index 0465f736de27..0fbc5b0e378e 100644 --- a/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py +++ b/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py @@ -2128,6 +2128,7 @@ def test_get_storage_pool(request_type, transport: str = "grpc"): psa_range="psa_range_value", encryption_type=common.EncryptionType.SERVICE_MANAGED, global_access_allowed=True, + allow_auto_tiering=True, replica_zone="replica_zone_value", zone="zone_value", ) @@ -2156,6 +2157,7 @@ def test_get_storage_pool(request_type, transport: str = "grpc"): assert response.psa_range == "psa_range_value" assert response.encryption_type == common.EncryptionType.SERVICE_MANAGED assert response.global_access_allowed is True + assert response.allow_auto_tiering is True assert response.replica_zone == "replica_zone_value" assert response.zone == "zone_value" @@ -2273,6 +2275,7 @@ async def test_get_storage_pool_empty_call_async(): psa_range="psa_range_value", encryption_type=common.EncryptionType.SERVICE_MANAGED, global_access_allowed=True, + allow_auto_tiering=True, replica_zone="replica_zone_value", zone="zone_value", ) @@ -2358,6 +2361,7 @@ async def test_get_storage_pool_async( psa_range="psa_range_value", encryption_type=common.EncryptionType.SERVICE_MANAGED, global_access_allowed=True, + allow_auto_tiering=True, replica_zone="replica_zone_value", zone="zone_value", ) @@ -2387,6 +2391,7 @@ async def test_get_storage_pool_async( assert response.psa_range == "psa_range_value" assert response.encryption_type == common.EncryptionType.SERVICE_MANAGED assert response.global_access_allowed is True + assert response.allow_auto_tiering is True assert response.replica_zone == "replica_zone_value" assert response.zone == "zone_value" @@ -3324,6 +3329,311 @@ async def test_delete_storage_pool_flattened_error_async(): ) 
+@pytest.mark.parametrize( + "request_type", + [ + storage_pool.SwitchActiveReplicaZoneRequest, + dict, + ], +) +def test_switch_active_replica_zone(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.switch_active_replica_zone), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.switch_active_replica_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = storage_pool.SwitchActiveReplicaZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_switch_active_replica_zone_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.switch_active_replica_zone), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.switch_active_replica_zone() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == storage_pool.SwitchActiveReplicaZoneRequest() + + +def test_switch_active_replica_zone_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = storage_pool.SwitchActiveReplicaZoneRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.switch_active_replica_zone), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.switch_active_replica_zone(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == storage_pool.SwitchActiveReplicaZoneRequest( + name="name_value", + ) + + +def test_switch_active_replica_zone_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.switch_active_replica_zone + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.switch_active_replica_zone + ] = mock_rpc + request = {} + client.switch_active_replica_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.switch_active_replica_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_switch_active_replica_zone_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.switch_active_replica_zone), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.switch_active_replica_zone() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == storage_pool.SwitchActiveReplicaZoneRequest() + + +@pytest.mark.asyncio +async def test_switch_active_replica_zone_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.switch_active_replica_zone + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.switch_active_replica_zone + ] = mock_rpc + + request = {} + await client.switch_active_replica_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.switch_active_replica_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_switch_active_replica_zone_async( + transport: str = "grpc_asyncio", + request_type=storage_pool.SwitchActiveReplicaZoneRequest, +): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.switch_active_replica_zone), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.switch_active_replica_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = storage_pool.SwitchActiveReplicaZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_switch_active_replica_zone_async_from_dict(): + await test_switch_active_replica_zone_async(request_type=dict) + + +def test_switch_active_replica_zone_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = storage_pool.SwitchActiveReplicaZoneRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.switch_active_replica_zone), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.switch_active_replica_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_switch_active_replica_zone_field_headers_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storage_pool.SwitchActiveReplicaZoneRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.switch_active_replica_zone), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.switch_active_replica_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -3943,6 +4253,7 @@ def test_get_volume(request_type, transport: str = "grpc"): multiple_endpoints=True, replica_zone="replica_zone_value", zone="zone_value", + cold_tier_size_gib=1888, ) response = client.get_volume(request) @@ -3982,6 +4293,7 @@ def test_get_volume(request_type, transport: str = "grpc"): assert response.multiple_endpoints is True assert response.replica_zone == "replica_zone_value" assert response.zone == "zone_value" + assert response.cold_tier_size_gib == 1888 def test_get_volume_empty_call(): @@ -4108,6 +4420,7 @@ async def test_get_volume_empty_call_async(): multiple_endpoints=True, replica_zone="replica_zone_value", zone="zone_value", + cold_tier_size_gib=1888, ) ) response = await client.get_volume() @@ -4202,6 +4515,7 @@ async def test_get_volume_async( multiple_endpoints=True, replica_zone="replica_zone_value", zone="zone_value", + cold_tier_size_gib=1888, ) ) response = await client.get_volume(request) @@ -4242,6 +4556,7 @@ async def test_get_volume_async( assert response.multiple_endpoints is True assert response.replica_zone == "replica_zone_value" assert response.zone == "zone_value" + assert response.cold_tier_size_gib == 1888 @pytest.mark.asyncio @@ -22924,6 +23239,7 @@ def test_create_storage_pool_rest(request_type): "psa_range": "psa_range_value", "encryption_type": 1, "global_access_allowed": True, + "allow_auto_tiering": True, "replica_zone": "replica_zone_value", "zone": "zone_value", } @@ -23345,6 +23661,7 @@ def test_get_storage_pool_rest(request_type): psa_range="psa_range_value", encryption_type=common.EncryptionType.SERVICE_MANAGED, global_access_allowed=True, + allow_auto_tiering=True, replica_zone="replica_zone_value", zone="zone_value", ) @@ -23377,6 +23694,7 @@ def test_get_storage_pool_rest(request_type): assert response.psa_range == "psa_range_value" assert 
response.encryption_type == common.EncryptionType.SERVICE_MANAGED assert response.global_access_allowed is True + assert response.allow_auto_tiering is True assert response.replica_zone == "replica_zone_value" assert response.zone == "zone_value" @@ -23683,6 +24001,7 @@ def test_update_storage_pool_rest(request_type): "psa_range": "psa_range_value", "encryption_type": 1, "global_access_allowed": True, + "allow_auto_tiering": True, "replica_zone": "replica_zone_value", "zone": "zone_value", } @@ -24358,6 +24677,254 @@ def test_delete_storage_pool_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + storage_pool.SwitchActiveReplicaZoneRequest, + dict, + ], +) +def test_switch_active_replica_zone_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/storagePools/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.switch_active_replica_zone(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_switch_active_replica_zone_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.switch_active_replica_zone + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.switch_active_replica_zone + ] = mock_rpc + + request = {} + client.switch_active_replica_zone(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.switch_active_replica_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_switch_active_replica_zone_rest_required_fields( + request_type=storage_pool.SwitchActiveReplicaZoneRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).switch_active_replica_zone._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).switch_active_replica_zone._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.switch_active_replica_zone(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_switch_active_replica_zone_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.switch_active_replica_zone._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_switch_active_replica_zone_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), 
mock.patch.object( + transports.NetAppRestInterceptor, "post_switch_active_replica_zone" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "pre_switch_active_replica_zone" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = storage_pool.SwitchActiveReplicaZoneRequest.pb( + storage_pool.SwitchActiveReplicaZoneRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = storage_pool.SwitchActiveReplicaZoneRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.switch_active_replica_zone( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_switch_active_replica_zone_rest_bad_request( + transport: str = "rest", request_type=storage_pool.SwitchActiveReplicaZoneRequest +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/storagePools/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.switch_active_replica_zone(request) + + +def test_switch_active_replica_zone_rest_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -24779,6 +25346,7 @@ def test_get_volume_rest(request_type): multiple_endpoints=True, replica_zone="replica_zone_value", zone="zone_value", + cold_tier_size_gib=1888, ) # Wrap the value into a proper Response obj @@ -24822,6 +25390,7 @@ def test_get_volume_rest(request_type): assert response.multiple_endpoints is True assert response.replica_zone == "replica_zone_value" assert response.zone == "zone_value" + assert response.cold_tier_size_gib == 1888 def test_get_volume_rest_use_cached_wrapped_rpc(): @@ -25184,6 +25753,7 @@ def test_create_volume_rest(request_type): "tiering_policy": {"tier_action": 1, "cooling_threshold_days": 2343}, "replica_zone": "replica_zone_value", "zone": "zone_value", + "cold_tier_size_gib": 1888, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -25668,6 +26238,7 @@ def test_update_volume_rest(request_type): "tiering_policy": {"tier_action": 1, "cooling_threshold_days": 2343}, "replica_zone": "replica_zone_value", "zone": "zone_value", + "cold_tier_size_gib": 1888, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -40680,6 +41251,7 @@ def test_net_app_base_transport(): "get_storage_pool", "update_storage_pool", "delete_storage_pool", + "switch_active_replica_zone", "list_volumes", "get_volume", "create_volume", @@ -41024,6 +41596,9 @@ def test_net_app_client_transport_session_collision(transport_name): session1 = client1.transport.delete_storage_pool._session session2 = client2.transport.delete_storage_pool._session assert session1 != session2 + session1 = client1.transport.switch_active_replica_zone._session + session2 = client2.transport.switch_active_replica_zone._session + assert session1 != session2 session1 = client1.transport.list_volumes._session session2 = client2.transport.list_volumes._session assert session1 != session2 From 35b2c456c6791bc47ffe894f3ef966558cb6c98e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 16 Sep 2024 10:56:49 -0700 Subject: [PATCH 21/59] docs: [google-cloud-asset] Comments are clarified for certain fields in messages `QueryAssetsResponse` and `ResourceSearchResult` (#13076) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 673500457 Source-Link: https://github.com/googleapis/googleapis/commit/c27097ea636b7b2699f1a1c9c6bf3fb66ff8a789 Source-Link: https://github.com/googleapis/googleapis-gen/commit/dd6d5ed8b59764109b996ba6895dd10be1c8b865 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFzc2V0Ly5Pd2xCb3QueWFtbCIsImgiOiJkZDZkNWVkOGI1OTc2NDEwOWI5OTZiYTY4OTVkZDEwYmUxYzhiODY1In0= --------- Co-authored-by: Owl Bot --- .../google/cloud/asset_v1/types/asset_service.py | 14 ++++++++------ .../google/cloud/asset_v1/types/assets.py | 6 +++--- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py b/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py index 88d2a833272d..1d09f9e767c6 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py @@ -2780,9 +2780,11 @@ class QueryAssetsResponse(proto.Message): valid ``response``. If ``done`` == ``false`` and the query result is being saved - in a output, the output_config field will be set. If + in an output, the output_config field will be set. If ``done`` == ``true``, exactly one of ``error``, - ``query_result`` or ``output_config`` will be set. + ``query_result`` or ``output_config`` will be set. [done] is + unset unless the [QueryAssetsResponse] contains a + [QueryAssetsResponse.job_reference]. error (google.rpc.status_pb2.Status): Error status. @@ -2792,10 +2794,10 @@ class QueryAssetsResponse(proto.Message): This field is a member of `oneof`_ ``response``. output_config (google.cloud.asset_v1.types.QueryAssetsOutputConfig): - Output configuration which indicates instead - of being returned in API response on the fly, - the query result will be saved in a specific - output. 
+ Output configuration, which indicates that + instead of being returned in an API response on + the fly, the query result will be saved in a + specific output. This field is a member of `oneof`_ ``response``. """ diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py b/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py index da13dc114c9b..d31228b6e04c 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py @@ -602,7 +602,7 @@ class EffectiveTagDetails(proto.Message): class ResourceSearchResult(proto.Message): r"""A result of Resource Search, containing information of a - cloud resource. Next ID: 34 + cloud resource. Attributes: name (str): @@ -684,8 +684,8 @@ class ResourceSearchResult(proto.Message): - Use a field query. Example: ``location:us-west*`` - Use a free text query. Example: ``us-west*`` labels (MutableMapping[str, str]): - Labels associated with this resource. See `Labelling and - grouping Google Cloud + User labels associated with this resource. See `Labelling + and grouping Google Cloud resources `__ for more information. This field is available only when the resource's Protobuf contains it. From 8fc8b25192eead473cf590b372d352a71634c8f2 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 16 Sep 2024 14:31:32 -0400 Subject: [PATCH 22/59] build: release script update (#13089) Towards b/366438331 --- .kokoro/release-single.sh | 2 +- .kokoro/release/common.cfg | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.kokoro/release-single.sh b/.kokoro/release-single.sh index 5665c4828a93..f917f8ef66d0 100755 --- a/.kokoro/release-single.sh +++ b/.kokoro/release-single.sh @@ -21,7 +21,7 @@ set -eo pipefail pwd # Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-2") python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index c9b8a36f766d..830be65dde19 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -28,7 +28,7 @@ before_action { fetch_keystore { keystore_resource { keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-1" + keyname: "google-cloud-pypi-token-keystore-2" } } } From 33834de6d9eeced6da30f3fcbeb4e1029e07cf18 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 16 Sep 2024 15:19:57 -0400 Subject: [PATCH 23/59] feat: [google-cloud-gke-multicloud] An optional field `security_posture_config` in message `.google.cloud.gkemulticloud.v1.AttachedCluster` is added (#13088) BEGIN_COMMIT_OVERRIDE feat: An optional field `security_posture_config` in message `.google.cloud.gkemulticloud.v1.AttachedCluster` is added feat: An optional field `kubelet_config` in message `.google.cloud.gkemulticloud.v1.AwsNodePool` is added END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
feat: An optional field `kubelet_config` in message `.google.cloud.gkemulticloud.v1.AwsNodePool` is added PiperOrigin-RevId: 674451558 Source-Link: https://github.com/googleapis/googleapis/commit/31df26d0ff3193117fbf9d6dd25280dc3f8ca978 Source-Link: https://github.com/googleapis/googleapis-gen/commit/23e655713582360b568172348be5d883caa40efb Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWdrZS1tdWx0aWNsb3VkLy5Pd2xCb3QueWFtbCIsImgiOiIyM2U2NTU3MTM1ODIzNjBiNTY4MTcyMzQ4YmU1ZDg4M2NhYTQwZWZiIn0= --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google/cloud/gke_multicloud/__init__.py | 4 + .../cloud/gke_multicloud_v1/__init__.py | 4 + .../attached_clusters/async_client.py | 1 + .../services/attached_clusters/client.py | 1 + .../cloud/gke_multicloud_v1/types/__init__.py | 4 + .../types/attached_resources.py | 8 ++ .../types/attached_service.py | 3 +- .../gke_multicloud_v1/types/aws_resources.py | 9 +- .../types/azure_resources.py | 2 +- .../types/common_resources.py | 127 ++++++++++++++++++ .../test_attached_clusters.py | 2 + .../gke_multicloud_v1/test_aws_clusters.py | 14 ++ 12 files changed, 176 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py index 718abad6aa06..bd6a6e31b887 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py @@ -175,8 +175,10 @@ ManagedPrometheusConfig, MaxPodsConstraint, MonitoringConfig, + NodeKubeletConfig, NodeTaint, OperationMetadata, + SecurityPostureConfig, WorkloadIdentityConfig, ) @@ -313,7 +315,9 @@ "ManagedPrometheusConfig", "MaxPodsConstraint", "MonitoringConfig", + "NodeKubeletConfig", "NodeTaint", "OperationMetadata", + "SecurityPostureConfig", "WorkloadIdentityConfig", ) diff --git 
a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py index e6ae2f8a5e9c..88648899fa1c 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py @@ -163,8 +163,10 @@ ManagedPrometheusConfig, MaxPodsConstraint, MonitoringConfig, + NodeKubeletConfig, NodeTaint, OperationMetadata, + SecurityPostureConfig, WorkloadIdentityConfig, ) @@ -291,10 +293,12 @@ "ManagedPrometheusConfig", "MaxPodsConstraint", "MonitoringConfig", + "NodeKubeletConfig", "NodeTaint", "OperationMetadata", "ReplicaPlacement", "RollbackAwsNodePoolUpdateRequest", + "SecurityPostureConfig", "SpotConfig", "SurgeSettings", "UpdateAttachedClusterRequest", diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py index 47e8caee062a..60c78caa006a 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py @@ -533,6 +533,7 @@ async def sample_update_attached_cluster(): - ``platform_version``. - ``proxy_config.kubernetes_secret.name``. - ``proxy_config.kubernetes_secret.namespace``. 
+ - ``security_posture_config.vulnerability_mode`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py index a380b95dfcf7..b8c138ceaef6 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py @@ -966,6 +966,7 @@ def sample_update_attached_cluster(): - ``platform_version``. - ``proxy_config.kubernetes_secret.name``. - ``proxy_config.kubernetes_secret.namespace``. + - ``security_posture_config.vulnerability_mode`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py index 664570793e2b..1f7c9d3c38e8 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py @@ -152,8 +152,10 @@ ManagedPrometheusConfig, MaxPodsConstraint, MonitoringConfig, + NodeKubeletConfig, NodeTaint, OperationMetadata, + SecurityPostureConfig, WorkloadIdentityConfig, ) @@ -284,7 +286,9 @@ "ManagedPrometheusConfig", "MaxPodsConstraint", "MonitoringConfig", + "NodeKubeletConfig", "NodeTaint", "OperationMetadata", + "SecurityPostureConfig", "WorkloadIdentityConfig", ) diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py index 
253db740d172..de4ea8f3bddc 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py @@ -134,6 +134,9 @@ class AttachedCluster(proto.Message): binary_authorization (google.cloud.gke_multicloud_v1.types.BinaryAuthorization): Optional. Binary Authorization configuration for this cluster. + security_posture_config (google.cloud.gke_multicloud_v1.types.SecurityPostureConfig): + Optional. Security Posture configuration for + this cluster. """ class State(proto.Enum): @@ -272,6 +275,11 @@ class State(proto.Enum): number=25, message=common_resources.BinaryAuthorization, ) + security_posture_config: common_resources.SecurityPostureConfig = proto.Field( + proto.MESSAGE, + number=26, + message=common_resources.SecurityPostureConfig, + ) class AttachedClustersAuthorization(proto.Message): diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_service.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_service.py index e5a54e3562c5..e0791affdff6 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_service.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_service.py @@ -211,7 +211,7 @@ class ImportAttachedClusterRequest(proto.Message): Required. The Kubernetes distribution of the underlying attached cluster. - Supported values: ["eks", "aks"]. + Supported values: ["eks", "aks", "generic"]. proxy_config (google.cloud.gke_multicloud_v1.types.AttachedProxyConfig): Optional. Proxy configuration for outbound HTTP(S) traffic. @@ -272,6 +272,7 @@ class UpdateAttachedClusterRequest(proto.Message): - ``platform_version``. - ``proxy_config.kubernetes_secret.name``. - ``proxy_config.kubernetes_secret.namespace``. 
+ - ``security_posture_config.vulnerability_mode`` """ attached_cluster: attached_resources.AttachedCluster = proto.Field( diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_resources.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_resources.py index 73fbfa3225ab..2943000087e5 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_resources.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_resources.py @@ -735,6 +735,8 @@ class AwsNodePool(proto.Message): management (google.cloud.gke_multicloud_v1.types.AwsNodeManagement): Optional. The Management configuration for this node pool. + kubelet_config (google.cloud.gke_multicloud_v1.types.NodeKubeletConfig): + Optional. Node kubelet configs. update_settings (google.cloud.gke_multicloud_v1.types.UpdateSettings): Optional. Update settings control the speed and disruption of the update. @@ -843,6 +845,11 @@ class State(proto.Enum): number=30, message="AwsNodeManagement", ) + kubelet_config: common_resources.NodeKubeletConfig = proto.Field( + proto.MESSAGE, + number=31, + message=common_resources.NodeKubeletConfig, + ) update_settings: "UpdateSettings" = proto.Field( proto.MESSAGE, number=32, @@ -1279,7 +1286,7 @@ class AwsProxyConfig(proto.Message): The secret must be a JSON encoded proxy configuration as described in - https://cloud.google.com/anthos/clusters/docs/multi-cloud/aws/how-to/use-a-proxy#create_a_proxy_configuration_file + https://cloud.google.com/kubernetes-engine/multi-cloud/docs/aws/how-to/use-a-proxy#create_a_proxy_configuration_file secret_version (str): The version string of the AWS Secret Manager secret that contains the HTTP(S) proxy diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/azure_resources.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/azure_resources.py index 
a6d17925f782..597c6e78f78a 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/azure_resources.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/azure_resources.py @@ -557,7 +557,7 @@ class AzureProxyConfig(proto.Message): The secret must be a JSON encoded proxy configuration as described in - https://cloud.google.com/anthos/clusters/docs/multi-cloud/azure/how-to/use-a-proxy#create_a_proxy_configuration_file + https://cloud.google.com/kubernetes-engine/multi-cloud/docs/azure/how-to/use-a-proxy#create_a_proxy_configuration_file Secret ids are formatted as ``https://.vault.azure.net/secrets//``. diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py index 9915db5dcc2b..58f053bff12c 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py @@ -28,12 +28,14 @@ "MaxPodsConstraint", "OperationMetadata", "NodeTaint", + "NodeKubeletConfig", "Fleet", "LoggingConfig", "LoggingComponentConfig", "MonitoringConfig", "ManagedPrometheusConfig", "BinaryAuthorization", + "SecurityPostureConfig", }, ) @@ -263,6 +265,96 @@ class Effect(proto.Enum): ) +class NodeKubeletConfig(proto.Message): + r"""Configuration for node pool kubelet options. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + insecure_kubelet_readonly_port_enabled (bool): + Optional. Enable the insecure kubelet read + only port. + cpu_manager_policy (str): + Optional. Control the CPU management policy on the node. See + https://kubernetes.io/docs/tasks/administer-cluster/cpu-management-policies/ + + The following values are allowed. 
+ + - "none": the default, which represents the existing + scheduling behavior. + - "static": allows pods with certain resource + characteristics to be granted increased CPU affinity and + exclusivity on the node. The default value is 'none' if + unspecified. + + This field is a member of `oneof`_ ``_cpu_manager_policy``. + cpu_cfs_quota (bool): + Optional. Enable CPU CFS quota enforcement + for containers that specify CPU limits. + + This option is enabled by default which makes + kubelet use CFS quota + (https://www.kernel.org/doc/Documentation/scheduler/sched-bwc.txt) + to enforce container CPU limits. Otherwise, CPU + limits will not be enforced at all. + + Disable this option to mitigate CPU throttling + problems while still having your pods to be in + Guaranteed QoS class by specifying the CPU + limits. + + The default value is 'true' if unspecified. + + This field is a member of `oneof`_ ``_cpu_cfs_quota``. + cpu_cfs_quota_period (str): + Optional. Set the CPU CFS quota period value + 'cpu.cfs_period_us'. + + The string must be a sequence of decimal numbers, each with + optional fraction and a unit suffix, such as "300ms". Valid + time units are "ns", "us" (or "µs"), "ms", "s", "m", "h". + The value must be a positive duration. + + The default value is '100ms' if unspecified. + + This field is a member of `oneof`_ ``_cpu_cfs_quota_period``. + pod_pids_limit (int): + Optional. Set the Pod PID limits. See + https://kubernetes.io/docs/concepts/policy/pid-limiting/#pod-pid-limits + + Controls the maximum number of processes allowed + to run in a pod. The value must be greater than + or equal to 1024 and less than 4194304. + + This field is a member of `oneof`_ ``_pod_pids_limit``. 
+ """ + + insecure_kubelet_readonly_port_enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + cpu_manager_policy: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + cpu_cfs_quota: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + cpu_cfs_quota_period: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + pod_pids_limit: int = proto.Field( + proto.INT64, + number=5, + optional=True, + ) + + class Fleet(proto.Message): r"""Fleet related configuration. @@ -414,4 +506,39 @@ class EvaluationMode(proto.Enum): ) +class SecurityPostureConfig(proto.Message): + r"""SecurityPostureConfig defines the flags needed to + enable/disable features for the Security Posture API. + + Attributes: + vulnerability_mode (google.cloud.gke_multicloud_v1.types.SecurityPostureConfig.VulnerabilityMode): + Sets which mode to use for vulnerability + scanning. + """ + + class VulnerabilityMode(proto.Enum): + r"""VulnerabilityMode defines enablement mode for vulnerability + scanning. + + Values: + VULNERABILITY_MODE_UNSPECIFIED (0): + Default value not specified. + VULNERABILITY_DISABLED (1): + Disables vulnerability scanning on the + cluster. + VULNERABILITY_ENTERPRISE (2): + Applies the Security Posture's vulnerability + on cluster Enterprise level features. 
+ """ + VULNERABILITY_MODE_UNSPECIFIED = 0 + VULNERABILITY_DISABLED = 1 + VULNERABILITY_ENTERPRISE = 2 + + vulnerability_mode: VulnerabilityMode = proto.Field( + proto.ENUM, + number=1, + enum=VulnerabilityMode, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py index a438c01e8714..1aeeadddb0c6 100644 --- a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py +++ b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py @@ -4992,6 +4992,7 @@ def test_create_attached_cluster_rest(request_type): "kubernetes_secret": {"name": "name_value", "namespace": "namespace_value"} }, "binary_authorization": {"evaluation_mode": 1}, + "security_posture_config": {"vulnerability_mode": 1}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -5440,6 +5441,7 @@ def test_update_attached_cluster_rest(request_type): "kubernetes_secret": {"name": "name_value", "namespace": "namespace_value"} }, "binary_authorization": {"evaluation_mode": 1}, + "security_posture_config": {"vulnerability_mode": 1}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py index 87fa3f604e12..f9a695942606 100644 --- a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py +++ b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py @@ -10124,6 +10124,13 @@ def test_create_aws_node_pool_rest(request_type): "max_pods_constraint": {"max_pods_per_node": 1798}, "errors": [{"message": "message_value"}], "management": {"auto_repair": True}, + "kubelet_config": { + "insecure_kubelet_readonly_port_enabled": True, + "cpu_manager_policy": "cpu_manager_policy_value", + "cpu_cfs_quota": True, + "cpu_cfs_quota_period": "cpu_cfs_quota_period_value", + "pod_pids_limit": 1488, + }, "update_settings": { "surge_settings": {"max_surge": 971, "max_unavailable": 1577} }, @@ -10590,6 +10597,13 @@ def test_update_aws_node_pool_rest(request_type): "max_pods_constraint": {"max_pods_per_node": 1798}, "errors": [{"message": "message_value"}], "management": {"auto_repair": True}, + "kubelet_config": { + "insecure_kubelet_readonly_port_enabled": True, + "cpu_manager_policy": "cpu_manager_policy_value", + "cpu_cfs_quota": True, + "cpu_cfs_quota_period": "cpu_cfs_quota_period_value", + "pod_pids_limit": 1488, + }, "update_settings": { "surge_settings": {"max_surge": 971, "max_unavailable": 1577} }, From a032b1147b93281760525a161b9f2437e8d7aeba Mon Sep 17 00:00:00 2001 From: yoshi-code-bot <70984784+yoshi-code-bot@users.noreply.github.com> Date: Mon, 16 Sep 2024 12:25:21 -0700 Subject: [PATCH 24/59] chore: Update the root changelog (#13073) Update the root changelog Co-authored-by: ohmayr --- CHANGELOG.md | 29 +++++++++++++++-------------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/CHANGELOG.md 
b/CHANGELOG.md index 9f4e47fb21b5..1004d55731cc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,11 +3,12 @@ Please refer to each API's `CHANGELOG.md` file under the `packages/` directory Changelogs ----- - [google-ads-admanager==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-admanager/CHANGELOG.md) +- [google-ads-marketingplatform-admin==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-marketingplatform-admin/CHANGELOG.md) - [google-ai-generativelanguage==0.6.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) - [google-analytics-admin==0.23.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-admin/CHANGELOG.md) - [google-analytics-data==0.18.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) - [google-apps-card==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-card/CHANGELOG.md) -- [google-apps-chat==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-chat/CHANGELOG.md) +- [google-apps-chat==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-chat/CHANGELOG.md) - [google-apps-events-subscriptions==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-events-subscriptions/CHANGELOG.md) - [google-apps-meet==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-meet/CHANGELOG.md) - [google-apps-script-type==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-script-type/CHANGELOG.md) @@ -15,12 +16,12 @@ Changelogs - [google-cloud-access-approval==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-approval/CHANGELOG.md) - 
[google-cloud-advisorynotifications==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-advisorynotifications/CHANGELOG.md) - [google-cloud-alloydb-connectors==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb-connectors/CHANGELOG.md) -- [google-cloud-alloydb==0.3.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb/CHANGELOG.md) +- [google-cloud-alloydb==0.3.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb/CHANGELOG.md) - [google-cloud-api-gateway==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-gateway/CHANGELOG.md) - [google-cloud-api-keys==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-keys/CHANGELOG.md) - [google-cloud-apigee-connect==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-connect/CHANGELOG.md) - [google-cloud-apigee-registry==0.6.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-registry/CHANGELOG.md) -- [google-cloud-apihub==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apihub/CHANGELOG.md) +- [google-cloud-apihub==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apihub/CHANGELOG.md) - [google-cloud-appengine-admin==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-admin/CHANGELOG.md) - [google-cloud-appengine-logging==1.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-logging/CHANGELOG.md) - [google-cloud-apphub==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apphub/CHANGELOG.md) @@ -30,7 +31,7 @@ Changelogs - 
[google-cloud-automl==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl/CHANGELOG.md) - [google-cloud-backupdr==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-backupdr/CHANGELOG.md) - [google-cloud-bare-metal-solution==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) -- [google-cloud-batch==0.17.26](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) +- [google-cloud-batch==0.17.27](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) - [google-cloud-beyondcorp-appconnections==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md) - [google-cloud-beyondcorp-appconnectors==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md) - [google-cloud-beyondcorp-appgateways==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md) @@ -41,7 +42,7 @@ Changelogs - [google-cloud-bigquery-connection==1.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-connection/CHANGELOG.md) - [google-cloud-bigquery-data-exchange==0.5.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md) - [google-cloud-bigquery-datapolicies==0.6.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md) -- [google-cloud-bigquery-datatransfer==3.15.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) +- 
[google-cloud-bigquery-datatransfer==3.15.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) - [google-cloud-bigquery-logging==1.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-logging/CHANGELOG.md) - [google-cloud-bigquery-migration==0.11.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration/CHANGELOG.md) - [google-cloud-bigquery-reservation==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-reservation/CHANGELOG.md) @@ -59,7 +60,7 @@ Changelogs - [google-cloud-confidentialcomputing==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-confidentialcomputing/CHANGELOG.md) - [google-cloud-config==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-config/CHANGELOG.md) - [google-cloud-contact-center-insights==1.17.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contact-center-insights/CHANGELOG.md) -- [google-cloud-container==2.50.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-container/CHANGELOG.md) +- [google-cloud-container==2.51.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-container/CHANGELOG.md) - [google-cloud-containeranalysis==2.14.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-containeranalysis/CHANGELOG.md) - [google-cloud-contentwarehouse==0.7.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contentwarehouse/CHANGELOG.md) - [google-cloud-data-fusion==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-fusion/CHANGELOG.md) @@ -71,19 +72,19 @@ Changelogs - 
[google-cloud-datalabeling==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datalabeling/CHANGELOG.md) - [google-cloud-dataplex==2.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex/CHANGELOG.md) - [google-cloud-dataproc-metastore==1.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc-metastore/CHANGELOG.md) -- [google-cloud-dataproc==5.10.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) +- [google-cloud-dataproc==5.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) - [google-cloud-datastream==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastream/CHANGELOG.md) - [google-cloud-deploy==2.0.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md) - [google-cloud-developerconnect==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-developerconnect/CHANGELOG.md) - [google-cloud-dialogflow-cx==1.35.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/CHANGELOG.md) - [google-cloud-dialogflow==2.31.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) -- [google-cloud-discoveryengine==0.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) +- [google-cloud-discoveryengine==0.12.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) - [google-cloud-dlp==3.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) - 
[google-cloud-dms==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms/CHANGELOG.md) -- [google-cloud-documentai==2.31.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) +- [google-cloud-documentai==2.32.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) - [google-cloud-domains==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-domains/CHANGELOG.md) - [google-cloud-edgecontainer==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgecontainer/CHANGELOG.md) -- [google-cloud-edgenetwork==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgenetwork/CHANGELOG.md) +- [google-cloud-edgenetwork==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgenetwork/CHANGELOG.md) - [google-cloud-enterpriseknowledgegraph==0.3.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md) - [google-cloud-essential-contacts==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-essential-contacts/CHANGELOG.md) - [google-cloud-eventarc-publishing==0.6.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc-publishing/CHANGELOG.md) @@ -132,7 +133,7 @@ Changelogs - [google-cloud-privilegedaccessmanager==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-privilegedaccessmanager/CHANGELOG.md) - [google-cloud-public-ca==0.3.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-public-ca/CHANGELOG.md) - [google-cloud-rapidmigrationassessment==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md) -- 
[google-cloud-recaptcha-enterprise==1.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) +- [google-cloud-recaptcha-enterprise==1.22.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) - [google-cloud-recommendations-ai==0.10.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommendations-ai/CHANGELOG.md) - [google-cloud-recommender==2.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommender/CHANGELOG.md) - [google-cloud-redis-cluster==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis-cluster/CHANGELOG.md) @@ -148,7 +149,7 @@ Changelogs - [google-cloud-securitycentermanagement==0.1.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycentermanagement/CHANGELOG.md) - [google-cloud-service-control==1.12.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-control/CHANGELOG.md) - [google-cloud-service-directory==1.11.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-directory/CHANGELOG.md) -- [google-cloud-service-management==1.8.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-management/CHANGELOG.md) +- [google-cloud-service-management==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-management/CHANGELOG.md) - [google-cloud-service-usage==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-usage/CHANGELOG.md) - [google-cloud-servicehealth==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-servicehealth/CHANGELOG.md) - 
[google-cloud-shell==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-shell/CHANGELOG.md) @@ -170,7 +171,7 @@ Changelogs - [google-cloud-video-transcoder==1.12.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-transcoder/CHANGELOG.md) - [google-cloud-videointelligence==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-videointelligence/CHANGELOG.md) - [google-cloud-vision==3.7.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vision/CHANGELOG.md) -- [google-cloud-visionai==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-visionai/CHANGELOG.md) +- [google-cloud-visionai==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-visionai/CHANGELOG.md) - [google-cloud-vm-migration==1.8.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vm-migration/CHANGELOG.md) - [google-cloud-vmwareengine==1.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vmwareengine/CHANGELOG.md) - [google-cloud-vpc-access==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vpc-access/CHANGELOG.md) @@ -180,7 +181,7 @@ Changelogs - [google-cloud-workstations==0.5.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workstations/CHANGELOG.md) - [google-geo-type==0.3.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-geo-type/CHANGELOG.md) - [google-maps-addressvalidation==0.3.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-addressvalidation/CHANGELOG.md) -- [google-maps-fleetengine-delivery==0.2.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine-delivery/CHANGELOG.md) +- 
[google-maps-fleetengine-delivery==0.2.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine-delivery/CHANGELOG.md) - [google-maps-fleetengine==0.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine/CHANGELOG.md) - [google-maps-mapsplatformdatasets==0.4.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-mapsplatformdatasets/CHANGELOG.md) - [google-maps-places==0.1.17](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md) From 786ac0ec331c405dd84cb1ac76b59a3463b7ba3a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 16 Sep 2024 15:37:52 -0400 Subject: [PATCH 25/59] chore: release main (#13087) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit :robot: I have created a release *beep* *boop* ---
google-apps-chat: 0.1.11 ## [0.1.11](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.10...google-apps-chat-v0.1.11) (2024-09-16) ### Features * If you're a domain administrator or a delegated administrator, you can now include the `useAdminAccess` parameter when you call the Chat API with your administrator privileges with the following methods to manage Chat spaces and memberships in your Workspace organization: ([a20b1e5](https://github.com/googleapis/google-cloud-python/commit/a20b1e508068845c36b1701836ba17a699cb10ac)) ### Documentation * A comment for field `filter` in message `.google.chat.v1.ListMembershipsRequest` is updated to support `!=` operator ([a20b1e5](https://github.com/googleapis/google-cloud-python/commit/a20b1e508068845c36b1701836ba17a699cb10ac))
google-cloud-asset: 3.26.4 ## [3.26.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-asset-v3.26.3...google-cloud-asset-v3.26.4) (2024-09-16) ### Documentation * [google-cloud-asset] Comments are clarified for certain fields in messages `QueryAssetsResponse` and `ResourceSearchResult` ([#13076](https://github.com/googleapis/google-cloud-python/issues/13076)) ([35b2c45](https://github.com/googleapis/google-cloud-python/commit/35b2c456c6791bc47ffe894f3ef966558cb6c98e))
google-cloud-batch: 0.17.28 ## [0.17.28](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.27...google-cloud-batch-v0.17.28) (2024-09-16) ### Features * [google-cloud-batch] A new value `CANCELLATION_IN_PROGRESS` is added to enum `State` ([#13074](https://github.com/googleapis/google-cloud-python/issues/13074)) ([76267b2](https://github.com/googleapis/google-cloud-python/commit/76267b2b8998fd2a3602ebf4d12d2aaa30a90cde))
google-cloud-cloudcontrolspartner: 0.2.0 ## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-cloudcontrolspartner-v0.1.3...google-cloud-cloudcontrolspartner-v0.2.0) (2024-09-16) ### ⚠ BREAKING CHANGES * [google-cloud-cloudcontrolspartner] Field behavior for field display_name in message .google.cloud.cloudcontrolspartner.v1beta.Customer is changed ### Features * A new value `ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER` is added to enum `.google.cloud.cloudcontrolspartner.v1beta.PartnerPermissions.Permission` ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) * Field behavior for field `customer_onboarding_state` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) * Field behavior for field `is_onboarded` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) ### Bug Fixes * [google-cloud-cloudcontrolspartner] Field behavior for field display_name in message .google.cloud.cloudcontrolspartner.v1beta.Customer is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) ### Documentation * A comment for field `display_name` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3))
google-cloud-dataproc: 5.12.0 ## [5.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.11.0...google-cloud-dataproc-v5.12.0) (2024-09-16) ### Features * [google-cloud-dataproc] Add FLINK metric source for Dataproc Metric Source ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) * [google-cloud-dataproc] Add kms key input for create cluster API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) * [google-cloud-dataproc] add resource reference for KMS keys and fix comments ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) * [google-cloud-dataproc] Add unreachable output field for LIST batch templates API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) * [google-cloud-dataproc] Add unreachable output field for LIST jobs API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) * [google-cloud-dataproc] Add unreachable output field for LIST workflow template API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) * [google-cloud-dataproc] Allow flink and trino job support for workflow templates API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) * [google-cloud-dataproc] Allow flink job support for jobs ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224))
google-cloud-gke-multicloud: 0.6.13 ## [0.6.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-multicloud-v0.6.12...google-cloud-gke-multicloud-v0.6.13) (2024-09-16) ### Features * An optional field `kubelet_config` in message `.google.cloud.gkemulticloud.v1.AwsNodePool` is added ([33834de](https://github.com/googleapis/google-cloud-python/commit/33834de6d9eeced6da30f3fcbeb4e1029e07cf18)) * An optional field `security_posture_config` in message `.google.cloud.gkemulticloud.v1.AttachedCluster` is added ([33834de](https://github.com/googleapis/google-cloud-python/commit/33834de6d9eeced6da30f3fcbeb4e1029e07cf18))
google-cloud-netapp: 0.3.14 ## [0.3.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-netapp-v0.3.13...google-cloud-netapp-v0.3.14) (2024-09-16) ### Features * A new field `allow_auto_tiering` in message `google.cloud.netapp.v1.StoragePool` is added ([5e3f4ae](https://github.com/googleapis/google-cloud-python/commit/5e3f4aebeb2f79efb1992ae623eb1aea86de2b0c)) * A new field `cold_tier_size_gib` in message `google.cloud.netapp.v1.Volume` is added ([5e3f4ae](https://github.com/googleapis/google-cloud-python/commit/5e3f4aebeb2f79efb1992ae623eb1aea86de2b0c)) * A new message `google.cloud.netapp.v1.SwitchActiveReplicaZoneRequest` is added ([5e3f4ae](https://github.com/googleapis/google-cloud-python/commit/5e3f4aebeb2f79efb1992ae623eb1aea86de2b0c)) * **api:** [google-cloud-netapp] A new rpc `SwitchActiveReplicaZone` is added to service `google.cloud.netapp.v1.NetApp` ([5e3f4ae](https://github.com/googleapis/google-cloud-python/commit/5e3f4aebeb2f79efb1992ae623eb1aea86de2b0c))
google-cloud-orchestration-airflow: 1.14.0 ## [1.14.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-orchestration-airflow-v1.13.1...google-cloud-orchestration-airflow-v1.14.0) (2024-09-16) ### Features * [google-cloud-orchestration-airflow] A new method `CheckUpgrade` is added to service `Environments` ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) * [google-cloud-orchestration-airflow] add `satisfies_pzi` to `Environment` ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) * A new field `airflow_metadata_retention_config` is added to message `.google.cloud.orchestration.airflow.service.v1.DataRetentionConfig` ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) * A new field `satisfies_pzi` is added to message `.google.cloud.orchestration.airflow.service.v1.Environment` ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) * A new message `AirflowMetadataRetentionPolicyConfig` is added ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) * A new message `CheckUpgradeRequest` is added ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) ### Documentation * A comment for field `maintenance_window` in message `.google.cloud.orchestration.airflow.service.v1.EnvironmentConfig` is changed ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) * A comment for field `storage_mode` in message `.google.cloud.orchestration.airflow.service.v1.TaskLogsRetentionConfig` is changed ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) * A comment for message `WorkloadsConfig` is changed 
([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31))
google-maps-routeoptimization: 0.1.3 ## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-maps-routeoptimization-v0.1.2...google-maps-routeoptimization-v0.1.3) (2024-09-16) ### Features * [google-maps-routeoptimization] minor fields and documentation update ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A new field `cost_per_kilometer_below_soft_max` is added to message `.google.maps.routeoptimization.v1.DistanceLimit` ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A new field `route_modifiers` is added to message `.google.maps.routeoptimization.v1.Vehicle` ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A new message `RouteModifiers` is added ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) ### Documentation * A comment for enum value `CODE_UNSPECIFIED` in enum `Code` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A comment for enum value `DEFAULT_SOLVE` in enum `SolvingMode` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A comment for enum value `RELAX_VISIT_TIMES_AND_SEQUENCE_AFTER_THRESHOLD` in enum `Level` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A comment for field `code` in message `.google.maps.routeoptimization.v1.OptimizeToursValidationError` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A comment for field `reasons` in message `.google.maps.routeoptimization.v1.SkippedShipment` is changed 
([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A comment for field `validation_errors` in message `.google.maps.routeoptimization.v1.OptimizeToursResponse` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A comment for message `OptimizeToursValidationError` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A comment for message `TimeWindow` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A comment for method `BatchOptimizeTours` in service `RouteOptimization` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050))
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: ohmayr --- .release-please-manifest.json | 18 +++++++-------- packages/google-apps-chat/CHANGELOG.md | 12 ++++++++++ .../google/apps/chat/gapic_version.py | 2 +- .../google/apps/chat_v1/gapic_version.py | 2 +- .../snippet_metadata_google.chat.v1.json | 2 +- packages/google-cloud-asset/CHANGELOG.md | 7 ++++++ .../google/cloud/asset/gapic_version.py | 2 +- .../google/cloud/asset_v1/gapic_version.py | 2 +- .../cloud/asset_v1p1beta1/gapic_version.py | 2 +- .../cloud/asset_v1p2beta1/gapic_version.py | 2 +- .../cloud/asset_v1p4beta1/gapic_version.py | 2 +- .../cloud/asset_v1p5beta1/gapic_version.py | 2 +- ...nippet_metadata_google.cloud.asset.v1.json | 2 +- ...metadata_google.cloud.asset.v1p1beta1.json | 2 +- ...metadata_google.cloud.asset.v1p2beta1.json | 2 +- ...metadata_google.cloud.asset.v1p5beta1.json | 2 +- packages/google-cloud-batch/CHANGELOG.md | 7 ++++++ .../google/cloud/batch/gapic_version.py | 2 +- .../google/cloud/batch_v1/gapic_version.py | 2 +- .../cloud/batch_v1alpha/gapic_version.py | 2 +- ...nippet_metadata_google.cloud.batch.v1.json | 2 +- ...t_metadata_google.cloud.batch.v1alpha.json | 2 +- .../CHANGELOG.md | 23 +++++++++++++++++++ .../cloudcontrolspartner/gapic_version.py | 2 +- .../cloudcontrolspartner_v1/gapic_version.py | 2 +- .../gapic_version.py | 2 +- ..._google.cloud.cloudcontrolspartner.v1.json | 2 +- ...gle.cloud.cloudcontrolspartner.v1beta.json | 2 +- packages/google-cloud-dataproc/CHANGELOG.md | 14 +++++++++++ .../google/cloud/dataproc/gapic_version.py | 2 +- .../google/cloud/dataproc_v1/gapic_version.py | 2 +- ...pet_metadata_google.cloud.dataproc.v1.json | 2 +- .../google-cloud-gke-multicloud/CHANGELOG.md | 8 +++++++ .../cloud/gke_multicloud/gapic_version.py 
| 2 +- .../cloud/gke_multicloud_v1/gapic_version.py | 2 +- ...etadata_google.cloud.gkemulticloud.v1.json | 2 +- packages/google-cloud-netapp/CHANGELOG.md | 10 ++++++++ .../google/cloud/netapp/gapic_version.py | 2 +- .../google/cloud/netapp_v1/gapic_version.py | 2 +- ...ippet_metadata_google.cloud.netapp.v1.json | 2 +- .../CHANGELOG.md | 19 +++++++++++++++ .../airflow/service/gapic_version.py | 2 +- .../airflow/service_v1/gapic_version.py | 2 +- .../airflow/service_v1beta1/gapic_version.py | 2 +- ...loud.orchestration.airflow.service.v1.json | 2 +- ...orchestration.airflow.service.v1beta1.json | 2 +- .../CHANGELOG.md | 23 +++++++++++++++++++ .../maps/routeoptimization/gapic_version.py | 2 +- .../routeoptimization_v1/gapic_version.py | 2 +- ...data_google.maps.routeoptimization.v1.json | 2 +- 50 files changed, 172 insertions(+), 49 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 2a86228c483b..19f3bed603ce 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -5,7 +5,7 @@ "packages/google-analytics-admin": "0.23.0", "packages/google-analytics-data": "0.18.11", "packages/google-apps-card": "0.1.4", - "packages/google-apps-chat": "0.1.10", + "packages/google-apps-chat": "0.1.11", "packages/google-apps-events-subscriptions": "0.1.2", "packages/google-apps-meet": "0.1.8", "packages/google-apps-script-type": "0.3.10", @@ -23,12 +23,12 @@ "packages/google-cloud-appengine-logging": "1.4.5", "packages/google-cloud-apphub": "0.1.2", "packages/google-cloud-artifact-registry": "1.11.5", - "packages/google-cloud-asset": "3.26.3", + "packages/google-cloud-asset": "3.26.4", "packages/google-cloud-assured-workloads": "1.12.5", "packages/google-cloud-automl": "2.13.5", "packages/google-cloud-backupdr": "0.1.3", "packages/google-cloud-bare-metal-solution": "1.7.5", - "packages/google-cloud-batch": "0.17.27", + "packages/google-cloud-batch": "0.17.28", "packages/google-cloud-beyondcorp-appconnections": "0.4.11", 
"packages/google-cloud-beyondcorp-appconnectors": "0.4.11", "packages/google-cloud-beyondcorp-appgateways": "0.4.11", @@ -49,7 +49,7 @@ "packages/google-cloud-build": "3.24.2", "packages/google-cloud-certificate-manager": "1.7.2", "packages/google-cloud-channel": "1.18.5", - "packages/google-cloud-cloudcontrolspartner": "0.1.3", + "packages/google-cloud-cloudcontrolspartner": "0.2.0", "packages/google-cloud-cloudquotas": "0.1.10", "packages/google-cloud-commerce-consumer-procurement": "0.1.7", "packages/google-cloud-common": "1.3.5", @@ -68,7 +68,7 @@ "packages/google-cloud-dataform": "0.5.11", "packages/google-cloud-datalabeling": "1.10.5", "packages/google-cloud-dataplex": "2.2.2", - "packages/google-cloud-dataproc": "5.11.0", + "packages/google-cloud-dataproc": "5.12.0", "packages/google-cloud-dataproc-metastore": "1.15.5", "packages/google-cloud-datastream": "1.9.5", "packages/google-cloud-deploy": "2.0.1", @@ -92,7 +92,7 @@ "packages/google-cloud-gke-backup": "0.5.11", "packages/google-cloud-gke-connect-gateway": "0.9.0", "packages/google-cloud-gke-hub": "1.14.2", - "packages/google-cloud-gke-multicloud": "0.6.12", + "packages/google-cloud-gke-multicloud": "0.6.13", "packages/google-cloud-gsuiteaddons": "0.3.10", "packages/google-cloud-iam": "2.15.2", "packages/google-cloud-iam-logging": "1.3.5", @@ -110,14 +110,14 @@ "packages/google-cloud-monitoring": "2.22.2", "packages/google-cloud-monitoring-dashboards": "2.15.3", "packages/google-cloud-monitoring-metrics-scopes": "1.6.5", - "packages/google-cloud-netapp": "0.3.13", + "packages/google-cloud-netapp": "0.3.14", "packages/google-cloud-network-connectivity": "2.4.5", "packages/google-cloud-network-management": "1.18.0", "packages/google-cloud-network-security": "0.9.11", "packages/google-cloud-network-services": "0.5.14", "packages/google-cloud-notebooks": "1.10.5", "packages/google-cloud-optimization": "1.8.5", - "packages/google-cloud-orchestration-airflow": "1.13.1", + 
"packages/google-cloud-orchestration-airflow": "1.14.0", "packages/google-cloud-os-config": "1.17.5", "packages/google-cloud-os-login": "2.14.6", "packages/google-cloud-parallelstore": "0.2.3", @@ -183,7 +183,7 @@ "packages/google-maps-fleetengine-delivery": "0.2.4", "packages/google-maps-mapsplatformdatasets": "0.4.2", "packages/google-maps-places": "0.1.17", - "packages/google-maps-routeoptimization": "0.1.2", + "packages/google-maps-routeoptimization": "0.1.3", "packages/google-maps-routing": "0.6.10", "packages/google-maps-solar": "0.1.2", "packages/google-shopping-css": "0.1.8", diff --git a/packages/google-apps-chat/CHANGELOG.md b/packages/google-apps-chat/CHANGELOG.md index 2ed2b6a49d98..ca79f8626cc8 100644 --- a/packages/google-apps-chat/CHANGELOG.md +++ b/packages/google-apps-chat/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [0.1.11](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.10...google-apps-chat-v0.1.11) (2024-09-16) + + +### Features + +* If you're a domain administrator or a delegated administrator, you can now include the `useAdminAccess` parameter when you call the Chat API with your administrator privileges with the following methods to manage Chat spaces and memberships in your Workspace organization: ([a20b1e5](https://github.com/googleapis/google-cloud-python/commit/a20b1e508068845c36b1701836ba17a699cb10ac)) + + +### Documentation + +* A comment for field `filter` in message `.google.chat.v1.ListMembershipsRequest` is updated to support `!=` operator ([a20b1e5](https://github.com/googleapis/google-cloud-python/commit/a20b1e508068845c36b1701836ba17a699cb10ac)) + ## [0.1.10](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.9...google-apps-chat-v0.1.10) (2024-09-05) diff --git a/packages/google-apps-chat/google/apps/chat/gapic_version.py b/packages/google-apps-chat/google/apps/chat/gapic_version.py index 558c8aab67c5..4b834789ba9e 100644 --- 
a/packages/google-apps-chat/google/apps/chat/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.11" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py index 558c8aab67c5..4b834789ba9e 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.11" # {x-release-please-version} diff --git a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json index ae65b2bfaefe..f0d8a1017646 100644 --- a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json +++ b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-chat", - "version": "0.1.0" + "version": "0.1.11" }, "snippets": [ { diff --git a/packages/google-cloud-asset/CHANGELOG.md b/packages/google-cloud-asset/CHANGELOG.md index a038645b6f8a..733c3b3086d1 100644 --- a/packages/google-cloud-asset/CHANGELOG.md +++ b/packages/google-cloud-asset/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-asset/#history +## [3.26.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-asset-v3.26.3...google-cloud-asset-v3.26.4) (2024-09-16) + + +### Documentation + +* [google-cloud-asset] Comments are clarified for certain fields in 
messages `QueryAssetsResponse` and `ResourceSearchResult` ([#13076](https://github.com/googleapis/google-cloud-python/issues/13076)) ([35b2c45](https://github.com/googleapis/google-cloud-python/commit/35b2c456c6791bc47ffe894f3ef966558cb6c98e)) + ## [3.26.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-asset-v3.26.2...google-cloud-asset-v3.26.3) (2024-07-30) diff --git a/packages/google-cloud-asset/google/cloud/asset/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset/gapic_version.py index 558c8aab67c5..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py index 558c8aab67c5..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py index 558c8aab67c5..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py index 558c8aab67c5..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py index e6f4a2765144..d0552744bb07 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.26.3" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py index 558c8aab67c5..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json index 5a90dfa88b31..ada630458cc7 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "0.1.0" + "version": "3.26.4" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json index dcbeb822733b..d088663067e5 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "0.1.0" + "version": "3.26.4" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json index 38eaede856da..6af620ebaf84 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "0.1.0" + "version": "3.26.4" }, "snippets": [ { diff --git 
a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json index 13ffc229e3a7..1d6c2a34e3ae 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "0.1.0" + "version": "3.26.4" }, "snippets": [ { diff --git a/packages/google-cloud-batch/CHANGELOG.md b/packages/google-cloud-batch/CHANGELOG.md index addee943a3ea..27c494159115 100644 --- a/packages/google-cloud-batch/CHANGELOG.md +++ b/packages/google-cloud-batch/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.17.28](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.27...google-cloud-batch-v0.17.28) (2024-09-16) + + +### Features + +* [google-cloud-batch] A new value `CANCELLATION_IN_PROGRESS` is added to enum `State` ([#13074](https://github.com/googleapis/google-cloud-python/issues/13074)) ([76267b2](https://github.com/googleapis/google-cloud-python/commit/76267b2b8998fd2a3602ebf4d12d2aaa30a90cde)) + ## [0.17.27](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.26...google-cloud-batch-v0.17.27) (2024-09-03) diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index 558c8aab67c5..5f7f6c52ce54 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.17.28" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index 558c8aab67c5..5f7f6c52ce54 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.17.28" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index 558c8aab67c5..5f7f6c52ce54 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.17.28" # {x-release-please-version} diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index e2df1067e4dd..4aeac54efe09 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.1.0" + "version": "0.17.28" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 7f67670b100c..88b47050fed2 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.1.0" + "version": "0.17.28" }, "snippets": [ { diff --git a/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md b/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md index b2bd23a8caac..275e18ff132a 100644 --- a/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md +++ b/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md @@ -1,5 +1,28 @@ # Changelog +## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-cloudcontrolspartner-v0.1.3...google-cloud-cloudcontrolspartner-v0.2.0) (2024-09-16) + + +### ⚠ BREAKING CHANGES + +* [google-cloud-cloudcontrolspartner] Field behavior for field display_name in message .google.cloud.cloudcontrolspartner.v1beta.Customer is changed + +### Features + 
+* A new value `ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER` is added to enum `.google.cloud.cloudcontrolspartner.v1beta.PartnerPermissions.Permission` ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) +* Field behavior for field `customer_onboarding_state` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) +* Field behavior for field `is_onboarded` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) + + +### Bug Fixes + +* [google-cloud-cloudcontrolspartner] Field behavior for field display_name in message .google.cloud.cloudcontrolspartner.v1beta.Customer is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) + + +### Documentation + +* A comment for field `display_name` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) + ## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-cloudcontrolspartner-v0.1.2...google-cloud-cloudcontrolspartner-v0.1.3) (2024-07-30) diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py index 558c8aab67c5..364164ddb134 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py index 558c8aab67c5..364164ddb134 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py index 558c8aab67c5..364164ddb134 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json index 5a93fc370b33..606c14b81f01 100644 --- a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-cloudcontrolspartner", - "version": "0.1.0" + "version": "0.2.0" }, "snippets": [ { diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json index 642805220b5c..9c0039bf1f65 100644 --- a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-cloudcontrolspartner", - "version": "0.1.0" + "version": "0.2.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc/CHANGELOG.md b/packages/google-cloud-dataproc/CHANGELOG.md index 00b658392846..7a8b08f948bd 100644 --- a/packages/google-cloud-dataproc/CHANGELOG.md +++ b/packages/google-cloud-dataproc/CHANGELOG.md @@ -4,6 +4,20 @@ [1]: https://pypi.org/project/google-cloud-dataproc/#history +## 
[5.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.11.0...google-cloud-dataproc-v5.12.0) (2024-09-16) + + +### Features + +* [google-cloud-dataproc] Add FLINK metric source for Dataproc Metric Source ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] Add kms key input for create cluster API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] add resource reference for KMS keys and fix comments ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] Add unreachable output field for LIST batch templates API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] Add unreachable output field for LIST jobs API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] Add unreachable output field for LIST workflow template API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] Allow flink and trino job support for workflow templates API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] Allow flink job support for jobs ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) + ## [5.11.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.10.2...google-cloud-dataproc-v5.11.0) (2024-09-03) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py index 
558c8aab67c5..435e79ea7a30 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "5.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py index 558c8aab67c5..435e79ea7a30 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "5.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json index c5f4e003db04..a44d5d6db9b3 100644 --- a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json +++ b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc", - "version": "0.1.0" + "version": "5.12.0" }, "snippets": [ { diff --git a/packages/google-cloud-gke-multicloud/CHANGELOG.md b/packages/google-cloud-gke-multicloud/CHANGELOG.md index fb3583312dfb..415808ddd4f9 100644 --- a/packages/google-cloud-gke-multicloud/CHANGELOG.md +++ b/packages/google-cloud-gke-multicloud/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## 
[0.6.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-multicloud-v0.6.12...google-cloud-gke-multicloud-v0.6.13) (2024-09-16) + + +### Features + +* An optional field `kubelet_config` in message `.google.cloud.gkemulticloud.v1.AwsNodePool` is added ([33834de](https://github.com/googleapis/google-cloud-python/commit/33834de6d9eeced6da30f3fcbeb4e1029e07cf18)) +* An optional field `security_posture_config` in message `.google.cloud.gkemulticloud.v1.AttachedCluster` is added ([33834de](https://github.com/googleapis/google-cloud-python/commit/33834de6d9eeced6da30f3fcbeb4e1029e07cf18)) + ## [0.6.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-multicloud-v0.6.11...google-cloud-gke-multicloud-v0.6.12) (2024-07-30) diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py index 558c8aab67c5..b72badcc1eca 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.13" # {x-release-please-version} diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py index 558c8aab67c5..b72badcc1eca 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.13" # {x-release-please-version} diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json b/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json index 08dd05577dce..7fec7507cd76 100644 --- a/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gke-multicloud", - "version": "0.1.0" + "version": "0.6.13" }, "snippets": [ { diff --git a/packages/google-cloud-netapp/CHANGELOG.md b/packages/google-cloud-netapp/CHANGELOG.md index d794a24d9b59..5313d0e0147c 100644 --- a/packages/google-cloud-netapp/CHANGELOG.md +++ b/packages/google-cloud-netapp/CHANGELOG.md @@ -1,5 +1,15 @@ # Changelog +## [0.3.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-netapp-v0.3.13...google-cloud-netapp-v0.3.14) (2024-09-16) + + +### Features + +* A new field 'allow_auto_tiering' in message 'google.cloud.netapp.v1.StoragePool' is added ([5e3f4ae](https://github.com/googleapis/google-cloud-python/commit/5e3f4aebeb2f79efb1992ae623eb1aea86de2b0c)) +* A new field 'cold_tier_size_gib' in message 'google.cloud.netapp.v1.Volume' is added ([5e3f4ae](https://github.com/googleapis/google-cloud-python/commit/5e3f4aebeb2f79efb1992ae623eb1aea86de2b0c)) +* A new message 'google.cloud.netapp.v1.SwitchActiveReplicaZoneRequest' is added ([5e3f4ae](https://github.com/googleapis/google-cloud-python/commit/5e3f4aebeb2f79efb1992ae623eb1aea86de2b0c)) +* **api:** [google-cloud-netapp] A new rpc 'SwitchActiveReplicaZone' is added to service 'google.cloud.netapp.v1.NetApp' 
([5e3f4ae](https://github.com/googleapis/google-cloud-python/commit/5e3f4aebeb2f79efb1992ae623eb1aea86de2b0c)) + ## [0.3.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-netapp-v0.3.12...google-cloud-netapp-v0.3.13) (2024-07-31) diff --git a/packages/google-cloud-netapp/google/cloud/netapp/gapic_version.py b/packages/google-cloud-netapp/google/cloud/netapp/gapic_version.py index 558c8aab67c5..0106eadcd8d9 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp/gapic_version.py +++ b/packages/google-cloud-netapp/google/cloud/netapp/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.3.14" # {x-release-please-version} diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_version.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_version.py index 558c8aab67c5..0106eadcd8d9 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_version.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.3.14" # {x-release-please-version} diff --git a/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json b/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json index a43c86575bc2..59c739a9b3ab 100644 --- a/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json +++ b/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-netapp", - "version": "0.1.0" + "version": "0.3.14" }, "snippets": [ { diff --git a/packages/google-cloud-orchestration-airflow/CHANGELOG.md b/packages/google-cloud-orchestration-airflow/CHANGELOG.md index f48353fb4a09..94c3195d35e3 100644 --- a/packages/google-cloud-orchestration-airflow/CHANGELOG.md +++ b/packages/google-cloud-orchestration-airflow/CHANGELOG.md @@ -1,5 +1,24 @@ # Changelog +## [1.14.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-orchestration-airflow-v1.13.1...google-cloud-orchestration-airflow-v1.14.0) (2024-09-16) + + +### Features + +* [google-cloud-orchestration-airflow] A new method `CheckUpgrade` is added to service `Environments` ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) +* [google-cloud-orchestration-airflow] add `satisfies_pzi` to `Environment` ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) +* A new field `airflow_metadata_retention_config` is added to message `.google.cloud.orchestration.airflow.service.v1.DataRetentionConfig` ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) +* A new field `satisfies_pzi` is added to message `.google.cloud.orchestration.airflow.service.v1.Environment` 
([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) +* A new message `AirflowMetadataRetentionPolicyConfig` is added ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) +* A new message `CheckUpgradeRequest` is added ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) + + +### Documentation + +* A comment for field `maintenance_window` in message `.google.cloud.orchestration.airflow.service.v1.EnvironmentConfig` is changed ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) +* A comment for field `storage_mode` in message `.google.cloud.orchestration.airflow.service.v1.TaskLogsRetentionConfig` is changed ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) +* A comment for message `WorkloadsConfig` is changed ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) + ## [1.13.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-orchestration-airflow-v1.13.0...google-cloud-orchestration-airflow-v1.13.1) (2024-07-30) diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/gapic_version.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/gapic_version.py index 558c8aab67c5..2159c8af6f8e 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/gapic_version.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_version.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_version.py index 558c8aab67c5..2159c8af6f8e 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_version.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py index 558c8aab67c5..2159c8af6f8e 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json index 389370672713..39aa0d2d425d 100644 --- a/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-orchestration-airflow", - "version": "0.1.0" + "version": "1.14.0" }, "snippets": [ { diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json index 4c413d500bdb..e42c5d68150d 100644 --- a/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-orchestration-airflow-service", - "version": "0.1.0" + "version": "1.14.0" }, "snippets": [ { diff --git a/packages/google-maps-routeoptimization/CHANGELOG.md b/packages/google-maps-routeoptimization/CHANGELOG.md index 3588a5750cea..14bb0c6b2dc5 100644 --- a/packages/google-maps-routeoptimization/CHANGELOG.md +++ b/packages/google-maps-routeoptimization/CHANGELOG.md @@ -1,5 +1,28 @@ # Changelog +## 
[0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-maps-routeoptimization-v0.1.2...google-maps-routeoptimization-v0.1.3) (2024-09-16) + + +### Features + +* [google-maps-routeoptimization] minor fields and documentation update ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A new field `cost_per_kilometer_below_soft_max` is added to message `.google.maps.routeoptimization.v1.DistanceLimit` ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A new field `route_modifiers` is added to message `.google.maps.routeoptimization.v1.Vehicle` ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A new message `RouteModifiers` is added ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) + + +### Documentation + +* A comment for enum value `CODE_UNSPECIFIED` in enum `Code` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A comment for enum value `DEFAULT_SOLVE` in enum `SolvingMode` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A comment for enum value `RELAX_VISIT_TIMES_AND_SEQUENCE_AFTER_THRESHOLD` in enum `Level` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A comment for field `code` in message `.google.maps.routeoptimization.v1.OptimizeToursValidationError` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A comment for field `reasons` in message `.google.maps.routeoptimization.v1.SkippedShipment` is changed 
([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A comment for field `validation_errors` in message `.google.maps.routeoptimization.v1.OptimizeToursResponse` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A comment for message `OptimizeToursValidationError` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A comment for message `TimeWindow` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A comment for method `BatchOptimizeTours` in service `RouteOptimization` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) + ## [0.1.2](https://github.com/googleapis/google-cloud-python/compare/google-maps-routeoptimization-v0.1.1...google-maps-routeoptimization-v0.1.2) (2024-07-30) diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py index 558c8aab67c5..114e40645800 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.3" # {x-release-please-version} diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py index 558c8aab67c5..114e40645800 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.3" # {x-release-please-version} diff --git a/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json b/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json index c329d83ca2a2..ff99ce099d17 100644 --- a/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json +++ b/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-routeoptimization", - "version": "0.1.0" + "version": "0.1.3" }, "snippets": [ { From 1f8b5640b0ac5397318ede4ebcfa120120ebccc8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 20 Sep 2024 11:13:48 -0400 Subject: [PATCH 26/59] feat: [google-cloud-dialogflow] created new boolean fields in conversation model for zone isolation and zone separation compliance status (#13096) BEGIN_COMMIT_OVERRIDE feat: created new boolean fields in conversation model for zone isolation and zone separation compliance status END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 676665642 Source-Link: https://github.com/googleapis/googleapis/commit/f87ae4487b303f32c3ddc9638649d32dda2e2776 Source-Link: https://github.com/googleapis/googleapis-gen/commit/d66ac41aa072bbc4367f7d2758d0d0ba7a7094a5 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRpYWxvZ2Zsb3cvLk93bEJvdC55YW1sIiwiaCI6ImQ2NmFjNDFhYTA3MmJiYzQzNjdmN2QyNzU4ZDBkMGJhN2E3MDk0YTUifQ== --------- Co-authored-by: Owl Bot --- .../dialogflow_v2/types/conversation_model.py | 20 +++++++++++++++++++ .../dialogflow_v2/test_conversation_models.py | 16 +++++++++++++++ 2 files changed, 36 insertions(+) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_model.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_model.py index 21c3e927a509..0170ee992c2f 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_model.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_model.py @@ -89,6 +89,16 @@ class ConversationModel(proto.Message): Metadata for smart reply models. This field is a member of `oneof`_ ``model_metadata``. + satisfies_pzs (bool): + Output only. A read only boolean field + reflecting Zone Separation status of the model. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + satisfies_pzi (bool): + Output only. A read only boolean field + reflecting Zone Isolation status of the model. + + This field is a member of `oneof`_ ``_satisfies_pzi``. 
""" class State(proto.Enum): @@ -183,6 +193,16 @@ class ModelType(proto.Enum): oneof="model_metadata", message="SmartReplyModelMetadata", ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=25, + optional=True, + ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=26, + optional=True, + ) class ConversationModelEvaluation(proto.Message): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py index cd7615c81d65..4f435eac5dd1 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py @@ -1614,6 +1614,8 @@ def test_get_conversation_model(request_type, transport: str = "grpc"): display_name="display_name_value", state=conversation_model.ConversationModel.State.CREATING, language_code="language_code_value", + satisfies_pzs=True, + satisfies_pzi=True, ) response = client.get_conversation_model(request) @@ -1629,6 +1631,8 @@ def test_get_conversation_model(request_type, transport: str = "grpc"): assert response.display_name == "display_name_value" assert response.state == conversation_model.ConversationModel.State.CREATING assert response.language_code == "language_code_value" + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_conversation_model_empty_call(): @@ -1742,6 +1746,8 @@ async def test_get_conversation_model_empty_call_async(): display_name="display_name_value", state=conversation_model.ConversationModel.State.CREATING, language_code="language_code_value", + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_conversation_model() @@ -1817,6 +1823,8 @@ async def test_get_conversation_model_async( display_name="display_name_value", state=conversation_model.ConversationModel.State.CREATING, 
language_code="language_code_value", + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_conversation_model(request) @@ -1833,6 +1841,8 @@ async def test_get_conversation_model_async( assert response.display_name == "display_name_value" assert response.state == conversation_model.ConversationModel.State.CREATING assert response.language_code == "language_code_value" + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -5028,6 +5038,8 @@ def test_create_conversation_model_rest(request_type): "language_code": "language_code_value", "article_suggestion_model_metadata": {"training_model_type": 2}, "smart_reply_model_metadata": {"training_model_type": 2}, + "satisfies_pzs": True, + "satisfies_pzi": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -5414,6 +5426,8 @@ def test_get_conversation_model_rest(request_type): display_name="display_name_value", state=conversation_model.ConversationModel.State.CREATING, language_code="language_code_value", + satisfies_pzs=True, + satisfies_pzi=True, ) # Wrap the value into a proper Response obj @@ -5433,6 +5447,8 @@ def test_get_conversation_model_rest(request_type): assert response.display_name == "display_name_value" assert response.state == conversation_model.ConversationModel.State.CREATING assert response.language_code == "language_code_value" + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_conversation_model_rest_use_cached_wrapped_rpc(): From 65f098a1125677c69240849703a0b97bcab7fc4c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 20 Sep 2024 11:21:01 -0400 Subject: [PATCH 27/59] feat: [google-analytics-data] add `GetPropertyQuotasSnapshot` method to the Data API v1alpha (#13095) 
BEGIN_COMMIT_OVERRIDE feat: add `GetPropertyQuotasSnapshot` method to the Data API v1alpha feat: add `PropertyQuotasSnapshot` type to the Data API v1alpha docs: update the documentation for the `CreateReportTask` method END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. feat: add `PropertyQuotasSnapshot` type to the Data API v1alpha docs: update the documentation for the `CreateReportTask` method PiperOrigin-RevId: 676527881 Source-Link: https://github.com/googleapis/googleapis/commit/923b6f3167fc309d9501bc97a6ab67cfe522522e Source-Link: https://github.com/googleapis/googleapis-gen/commit/8729c5b4cdacca9673ceab43f90bffdc97aa5147 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFuYWx5dGljcy1kYXRhLy5Pd2xCb3QueWFtbCIsImgiOiI4NzI5YzViNGNkYWNjYTk2NzNjZWFiNDNmOTBiZmZkYzk3YWE1MTQ3In0= --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google/analytics/data_v1alpha/__init__.py | 4 + .../data_v1alpha/gapic_metadata.json | 15 + .../alpha_analytics_data/async_client.py | 124 + .../services/alpha_analytics_data/client.py | 132 + .../alpha_analytics_data/transports/base.py | 17 + .../alpha_analytics_data/transports/grpc.py | 37 + .../transports/grpc_asyncio.py | 42 + .../alpha_analytics_data/transports/rest.py | 136 + .../analytics/data_v1alpha/types/__init__.py | 4 + .../data_v1alpha/types/analytics_data_api.py | 64 +- .../analytics/data_v1alpha/types/data.py | 4 +- ...data_get_property_quotas_snapshot_async.py | 52 + ..._data_get_property_quotas_snapshot_sync.py | 52 + ...etadata_google.analytics.data.v1alpha.json | 161 + .../scripts/fixup_data_v1alpha_keywords.py | 1 + .../data_v1alpha/test_alpha_analytics_data.py | 3373 ++++++++++------- 16 files changed, 2887 insertions(+), 1331 deletions(-) create mode 100644 packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py create mode 100644 
packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py b/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py index d27c32f3b750..4b5c6ad3ac51 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py @@ -32,6 +32,7 @@ CreateRecurringAudienceListRequest, CreateReportTaskRequest, GetAudienceListRequest, + GetPropertyQuotasSnapshotRequest, GetRecurringAudienceListRequest, GetReportTaskRequest, ListAudienceListsRequest, @@ -40,6 +41,7 @@ ListRecurringAudienceListsResponse, ListReportTasksRequest, ListReportTasksResponse, + PropertyQuotasSnapshot, QueryAudienceListRequest, QueryAudienceListResponse, QueryReportTaskRequest, @@ -172,6 +174,7 @@ "FunnelStep", "FunnelSubReport", "GetAudienceListRequest", + "GetPropertyQuotasSnapshotRequest", "GetRecurringAudienceListRequest", "GetReportTaskRequest", "InListFilter", @@ -190,6 +193,7 @@ "NumericValue", "OrderBy", "PropertyQuota", + "PropertyQuotasSnapshot", "QueryAudienceListRequest", "QueryAudienceListResponse", "QueryReportTaskRequest", diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json index 886097e06d3f..bb6c0b6f462d 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json @@ -30,6 +30,11 @@ "get_audience_list" ] }, + "GetPropertyQuotasSnapshot": { + "methods": [ + "get_property_quotas_snapshot" + ] + }, "GetRecurringAudienceList": { "methods": [ "get_recurring_audience_list" @@ -100,6 +105,11 @@ "get_audience_list" ] }, + "GetPropertyQuotasSnapshot": { + "methods": [ + 
"get_property_quotas_snapshot" + ] + }, "GetRecurringAudienceList": { "methods": [ "get_recurring_audience_list" @@ -170,6 +180,11 @@ "get_audience_list" ] }, + "GetPropertyQuotasSnapshot": { + "methods": [ + "get_property_quotas_snapshot" + ] + }, "GetRecurringAudienceList": { "methods": [ "get_recurring_audience_list" diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py index 5afbe71746b5..85f47086326b 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py @@ -71,6 +71,12 @@ class AlphaAnalyticsDataAsyncClient: parse_audience_list_path = staticmethod( AlphaAnalyticsDataClient.parse_audience_list_path ) + property_quotas_snapshot_path = staticmethod( + AlphaAnalyticsDataClient.property_quotas_snapshot_path + ) + parse_property_quotas_snapshot_path = staticmethod( + AlphaAnalyticsDataClient.parse_property_quotas_snapshot_path + ) recurring_audience_list_path = staticmethod( AlphaAnalyticsDataClient.recurring_audience_list_path ) @@ -1468,6 +1474,118 @@ async def sample_list_recurring_audience_lists(): # Done; return the response. return response + async def get_property_quotas_snapshot( + self, + request: Optional[ + Union[analytics_data_api.GetPropertyQuotasSnapshotRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.PropertyQuotasSnapshot: + r"""Get all property quotas organized by quota category + for a given property. This will charge 1 property quota + from the category with the most quota. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = await client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest, dict]]): + The request object. A request to return the + PropertyQuotasSnapshot for a given + category. + name (:class:`str`): + Required. Quotas from this property will be listed in + the response. Format: + ``properties/{property}/propertyQuotasSnapshot`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.PropertyQuotasSnapshot: + Current state of all Property Quotas + organized by quota category. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetPropertyQuotasSnapshotRequest): + request = analytics_data_api.GetPropertyQuotasSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_property_quotas_snapshot + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def create_report_task( self, request: Optional[ @@ -1485,6 +1603,12 @@ async def create_report_task( running asynchronous request to form a customized report of your Google Analytics event data. + A report task will be retained and available for + querying for 72 hours after it has been created. + + A report task created by one user can be listed and + queried by all users who have access to the property. + .. 
code-block:: python # This snippet has been automatically generated and should be regarded as a diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py index 4a3bc827021a..2b333ae4af4a 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py @@ -207,6 +207,21 @@ def parse_audience_list_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def property_quotas_snapshot_path( + property: str, + ) -> str: + """Returns a fully-qualified property_quotas_snapshot string.""" + return "properties/{property}/propertyQuotasSnapshot".format( + property=property, + ) + + @staticmethod + def parse_property_quotas_snapshot_path(path: str) -> Dict[str, str]: + """Parses a property_quotas_snapshot path into its component segments.""" + m = re.match(r"^properties/(?P.+?)/propertyQuotasSnapshot$", path) + return m.groupdict() if m else {} + @staticmethod def recurring_audience_list_path( property: str, @@ -1898,6 +1913,117 @@ def sample_list_recurring_audience_lists(): # Done; return the response. return response + def get_property_quotas_snapshot( + self, + request: Optional[ + Union[analytics_data_api.GetPropertyQuotasSnapshotRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.PropertyQuotasSnapshot: + r"""Get all property quotas organized by quota category + for a given property. This will charge 1 property quota + from the category with the most quota. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest, dict]): + The request object. A request to return the + PropertyQuotasSnapshot for a given + category. + name (str): + Required. Quotas from this property will be listed in + the response. Format: + ``properties/{property}/propertyQuotasSnapshot`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.PropertyQuotasSnapshot: + Current state of all Property Quotas + organized by quota category. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetPropertyQuotasSnapshotRequest): + request = analytics_data_api.GetPropertyQuotasSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_property_quotas_snapshot + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def create_report_task( self, request: Optional[ @@ -1915,6 +2041,12 @@ def create_report_task( running asynchronous request to form a customized report of your Google Analytics event data. + A report task will be retained and available for + querying for 72 hours after it has been created. + + A report task created by one user can be listed and + queried by all users who have access to the property. + .. 
code-block:: python # This snippet has been automatically generated and should be regarded as a diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py index c2c66c588816..3ba97b9f363e 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py @@ -180,6 +180,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_property_quotas_snapshot: gapic_v1.method.wrap_method( + self.get_property_quotas_snapshot, + default_timeout=None, + client_info=client_info, + ), self.create_report_task: gapic_v1.method.wrap_method( self.create_report_task, default_timeout=None, @@ -320,6 +325,18 @@ def list_recurring_audience_lists( ]: raise NotImplementedError() + @property + def get_property_quotas_snapshot( + self, + ) -> Callable[ + [analytics_data_api.GetPropertyQuotasSnapshotRequest], + Union[ + analytics_data_api.PropertyQuotasSnapshot, + Awaitable[analytics_data_api.PropertyQuotasSnapshot], + ], + ]: + raise NotImplementedError() + @property def create_report_task( self, diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py index b9962cad40e2..c43f7d864e80 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py @@ -659,6 +659,37 @@ def list_recurring_audience_lists( ) return 
self._stubs["list_recurring_audience_lists"] + @property + def get_property_quotas_snapshot( + self, + ) -> Callable[ + [analytics_data_api.GetPropertyQuotasSnapshotRequest], + analytics_data_api.PropertyQuotasSnapshot, + ]: + r"""Return a callable for the get property quotas snapshot method over gRPC. + + Get all property quotas organized by quota category + for a given property. This will charge 1 property quota + from the category with the most quota. + + Returns: + Callable[[~.GetPropertyQuotasSnapshotRequest], + ~.PropertyQuotasSnapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_property_quotas_snapshot" not in self._stubs: + self._stubs["get_property_quotas_snapshot"] = self.grpc_channel.unary_unary( + "/google.analytics.data.v1alpha.AlphaAnalyticsData/GetPropertyQuotasSnapshot", + request_serializer=analytics_data_api.GetPropertyQuotasSnapshotRequest.serialize, + response_deserializer=analytics_data_api.PropertyQuotasSnapshot.deserialize, + ) + return self._stubs["get_property_quotas_snapshot"] + @property def create_report_task( self, @@ -672,6 +703,12 @@ def create_report_task( running asynchronous request to form a customized report of your Google Analytics event data. + A report task will be retained and available for + querying for 72 hours after it has been created. + + A report task created by one user can be listed and + queried by all users who have access to the property. 
+ Returns: Callable[[~.CreateReportTaskRequest], ~.Operation]: diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py index c05a987fbb2b..a220f2ddb524 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py @@ -667,6 +667,37 @@ def list_recurring_audience_lists( ) return self._stubs["list_recurring_audience_lists"] + @property + def get_property_quotas_snapshot( + self, + ) -> Callable[ + [analytics_data_api.GetPropertyQuotasSnapshotRequest], + Awaitable[analytics_data_api.PropertyQuotasSnapshot], + ]: + r"""Return a callable for the get property quotas snapshot method over gRPC. + + Get all property quotas organized by quota category + for a given property. This will charge 1 property quota + from the category with the most quota. + + Returns: + Callable[[~.GetPropertyQuotasSnapshotRequest], + Awaitable[~.PropertyQuotasSnapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_property_quotas_snapshot" not in self._stubs: + self._stubs["get_property_quotas_snapshot"] = self.grpc_channel.unary_unary( + "/google.analytics.data.v1alpha.AlphaAnalyticsData/GetPropertyQuotasSnapshot", + request_serializer=analytics_data_api.GetPropertyQuotasSnapshotRequest.serialize, + response_deserializer=analytics_data_api.PropertyQuotasSnapshot.deserialize, + ) + return self._stubs["get_property_quotas_snapshot"] + @property def create_report_task( self, @@ -681,6 +712,12 @@ def create_report_task( running asynchronous request to form a customized report of your Google Analytics event data. + A report task will be retained and available for + querying for 72 hours after it has been created. + + A report task created by one user can be listed and + queried by all users who have access to the property. + Returns: Callable[[~.CreateReportTaskRequest], Awaitable[~.Operation]]: @@ -841,6 +878,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_property_quotas_snapshot: gapic_v1.method_async.wrap_method( + self.get_property_quotas_snapshot, + default_timeout=None, + client_info=client_info, + ), self.create_report_task: gapic_v1.method_async.wrap_method( self.create_report_task, default_timeout=None, diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py index 5f98dacd404b..510c1d55640b 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py @@ -103,6 +103,14 @@ def post_get_audience_list(self, response): logging.log(f"Received response: {response}") return response + def pre_get_property_quotas_snapshot(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_get_property_quotas_snapshot(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_recurring_audience_list(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -275,6 +283,31 @@ def post_get_audience_list( """ return response + def pre_get_property_quotas_snapshot( + self, + request: analytics_data_api.GetPropertyQuotasSnapshotRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + analytics_data_api.GetPropertyQuotasSnapshotRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_property_quotas_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlphaAnalyticsData server. + """ + return request, metadata + + def post_get_property_quotas_snapshot( + self, response: analytics_data_api.PropertyQuotasSnapshot + ) -> analytics_data_api.PropertyQuotasSnapshot: + """Post-rpc interceptor for get_property_quotas_snapshot + + Override in a subclass to manipulate the response + after it is returned by the AlphaAnalyticsData server but before + it is returned to user code. 
+ """ + return response + def pre_get_recurring_audience_list( self, request: analytics_data_api.GetRecurringAudienceListRequest, @@ -1002,6 +1035,98 @@ def __call__( resp = self._interceptor.post_get_audience_list(resp) return resp + class _GetPropertyQuotasSnapshot(AlphaAnalyticsDataRestStub): + def __hash__(self): + return hash("GetPropertyQuotasSnapshot") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_data_api.GetPropertyQuotasSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.PropertyQuotasSnapshot: + r"""Call the get property quotas + snapshot method over HTTP. + + Args: + request (~.analytics_data_api.GetPropertyQuotasSnapshotRequest): + The request object. A request to return the + PropertyQuotasSnapshot for a given + category. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.PropertyQuotasSnapshot: + Current state of all Property Quotas + organized by quota category. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=properties/*/propertyQuotasSnapshot}", + }, + ] + request, metadata = self._interceptor.pre_get_property_quotas_snapshot( + request, metadata + ) + pb_request = analytics_data_api.GetPropertyQuotasSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.PropertyQuotasSnapshot() + pb_resp = analytics_data_api.PropertyQuotasSnapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_property_quotas_snapshot(resp) + return resp + class _GetRecurringAudienceList(AlphaAnalyticsDataRestStub): def __hash__(self): return hash("GetRecurringAudienceList") @@ -1876,6 +2001,17 @@ def get_audience_list( # In C++ this would require a dynamic_cast return self._GetAudienceList(self._session, self._host, self._interceptor) # type: ignore + @property + def get_property_quotas_snapshot( + self, + ) -> Callable[ + [analytics_data_api.GetPropertyQuotasSnapshotRequest], + analytics_data_api.PropertyQuotasSnapshot, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetPropertyQuotasSnapshot(self._session, self._host, self._interceptor) # type: ignore + @property def get_recurring_audience_list( self, diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py b/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py index be0c011beb28..c5f36ceee95f 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py @@ -23,6 +23,7 @@ CreateRecurringAudienceListRequest, CreateReportTaskRequest, GetAudienceListRequest, + GetPropertyQuotasSnapshotRequest, GetRecurringAudienceListRequest, GetReportTaskRequest, ListAudienceListsRequest, @@ -31,6 +32,7 @@ ListRecurringAudienceListsResponse, ListReportTasksRequest, ListReportTasksResponse, + PropertyQuotasSnapshot, QueryAudienceListRequest, QueryAudienceListResponse, QueryReportTaskRequest, @@ -129,6 +131,7 @@ 
"CreateRecurringAudienceListRequest", "CreateReportTaskRequest", "GetAudienceListRequest", + "GetPropertyQuotasSnapshotRequest", "GetRecurringAudienceListRequest", "GetReportTaskRequest", "ListAudienceListsRequest", @@ -137,6 +140,7 @@ "ListRecurringAudienceListsResponse", "ListReportTasksRequest", "ListReportTasksResponse", + "PropertyQuotasSnapshot", "QueryAudienceListRequest", "QueryAudienceListResponse", "QueryReportTaskRequest", diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py b/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py index 40cf2af6b247..5ef02adbab94 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py @@ -31,6 +31,8 @@ "GetRecurringAudienceListRequest", "ListRecurringAudienceListsRequest", "ListRecurringAudienceListsResponse", + "GetPropertyQuotasSnapshotRequest", + "PropertyQuotasSnapshot", "GetAudienceListRequest", "ListAudienceListsRequest", "ListAudienceListsResponse", @@ -368,6 +370,60 @@ def raw_page(self): ) +class GetPropertyQuotasSnapshotRequest(proto.Message): + r"""A request to return the PropertyQuotasSnapshot for a given + category. + + Attributes: + name (str): + Required. Quotas from this property will be listed in the + response. Format: + ``properties/{property}/propertyQuotasSnapshot`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class PropertyQuotasSnapshot(proto.Message): + r"""Current state of all Property Quotas organized by quota + category. + + Attributes: + name (str): + Identifier. The property quota snapshot + resource name. 
+ core_property_quota (google.analytics.data_v1alpha.types.PropertyQuota): + Property Quota for core property tokens + realtime_property_quota (google.analytics.data_v1alpha.types.PropertyQuota): + Property Quota for realtime property tokens + funnel_property_quota (google.analytics.data_v1alpha.types.PropertyQuota): + Property Quota for funnel property tokens + """ + + name: str = proto.Field( + proto.STRING, + number=4, + ) + core_property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=1, + message=data.PropertyQuota, + ) + realtime_property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=2, + message=data.PropertyQuota, + ) + funnel_property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=3, + message=data.PropertyQuota, + ) + + class GetAudienceListRequest(proto.Message): r"""A request to retrieve configuration metadata about a specific audience list. @@ -942,7 +998,7 @@ class RunFunnelReportRequest(proto.Message): Attributes: property (str): - Optional. A Google Analytics GA4 property identifier whose + Optional. A Google Analytics property identifier whose events are tracked. Specified in the URL path and not the body. To learn more, see `where to find your Property ID `__. @@ -1146,7 +1202,7 @@ class ReportTask(proto.Message): name (str): Output only. Identifier. The report task resource name assigned during creation. Format: - ``properties/{property}/reportTasks/{report_task}`` + "properties/{property}/reportTasks/{report_task}". report_definition (google.analytics.data_v1alpha.types.ReportTask.ReportDefinition): Optional. A report definition to fetch report data, which describes the structure of a report. @@ -1236,8 +1292,8 @@ class ReportDefinition(proto.Message): returned if they are not separately removed by a filter. Regardless of this ``keep_empty_rows`` setting, only data - recorded by the Google Analytics (GA4) property can be - displayed in a report. 
+ recorded by the Google Analytics property can be displayed + in a report. For example if a property never logs a ``purchase`` event, then a query for the ``eventName`` dimension and diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py b/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py index d0d65e10b736..f3e86639400b 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py @@ -293,7 +293,7 @@ class MetricType(proto.Enum): class RestrictedMetricType(proto.Enum): r"""Categories of data that you may be restricted from viewing on - certain GA4 properties. + certain Google Analytics properties. Values: RESTRICTED_METRIC_TYPE_UNSPECIFIED (0): @@ -2231,7 +2231,7 @@ class Segment(proto.Message): particular line of products or who visit a specific part of your site or trigger certain events in your app. - To learn more, see `GA4 Segment + To learn more, see `Segment Builder `__. This message has `oneof`_ fields (mutually exclusive fields). diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py new file mode 100644 index 000000000000..cfa47528bf6b --- /dev/null +++ b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPropertyQuotasSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +async def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = await client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_async] diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py new file mode 100644 index 000000000000..964edacdbb5d --- /dev/null +++ b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetPropertyQuotasSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_sync] diff --git a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json index 619e88f4243b..15f8d0d6e6d8 100644 --- a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json +++ b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json @@ -679,6 +679,167 @@ ], "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_audience_list_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + 
"client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", + "shortName": "AlphaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.get_property_quotas_snapshot", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.GetPropertyQuotasSnapshot", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "GetPropertyQuotasSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.PropertyQuotasSnapshot", + "shortName": "get_property_quotas_snapshot" + }, + "description": "Sample for GetPropertyQuotasSnapshot", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", + "shortName": 
"AlphaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.get_property_quotas_snapshot", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.GetPropertyQuotasSnapshot", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "GetPropertyQuotasSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.PropertyQuotasSnapshot", + "shortName": "get_property_quotas_snapshot" + }, + "description": "Sample for GetPropertyQuotasSnapshot", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-analytics-data/scripts/fixup_data_v1alpha_keywords.py b/packages/google-analytics-data/scripts/fixup_data_v1alpha_keywords.py index 416f17a987b4..463b61dcee73 100644 --- 
a/packages/google-analytics-data/scripts/fixup_data_v1alpha_keywords.py +++ b/packages/google-analytics-data/scripts/fixup_data_v1alpha_keywords.py @@ -43,6 +43,7 @@ class dataCallTransformer(cst.CSTTransformer): 'create_recurring_audience_list': ('parent', 'recurring_audience_list', ), 'create_report_task': ('parent', 'report_task', ), 'get_audience_list': ('name', ), + 'get_property_quotas_snapshot': ('name', ), 'get_recurring_audience_list': ('name', ), 'get_report_task': ('name', ), 'list_audience_lists': ('parent', 'page_size', 'page_token', ), diff --git a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py index 16d76b2f738b..1a4da5db4733 100644 --- a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py +++ b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py @@ -5120,11 +5120,11 @@ async def test_list_recurring_audience_lists_async_pages(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.CreateReportTaskRequest, + analytics_data_api.GetPropertyQuotasSnapshotRequest, dict, ], ) -def test_create_report_task(request_type, transport: str = "grpc"): +def test_get_property_quotas_snapshot(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5136,23 +5136,26 @@ def test_create_report_task(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_report_task(request) + call.return_value = analytics_data_api.PropertyQuotasSnapshot( + name="name_value", + ) + response = client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) + assert response.name == "name_value" -def test_create_report_task_empty_call(): +def test_get_property_quotas_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( @@ -5162,18 +5165,18 @@ def test_create_report_task_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_report_task() + client.get_property_quotas_snapshot() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.CreateReportTaskRequest() + assert args[0] == analytics_data_api.GetPropertyQuotasSnapshotRequest() -def test_create_report_task_non_empty_request_with_auto_populated_field(): +def test_get_property_quotas_snapshot_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AlphaAnalyticsDataClient( @@ -5184,26 +5187,26 @@ def test_create_report_task_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.CreateReportTaskRequest( - parent="parent_value", + request = analytics_data_api.GetPropertyQuotasSnapshotRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_report_task(request=request) + client.get_property_quotas_snapshot(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.CreateReportTaskRequest( - parent="parent_value", + assert args[0] == analytics_data_api.GetPropertyQuotasSnapshotRequest( + name="name_value", ) -def test_create_report_task_use_cached_wrapped_rpc(): +def test_get_property_quotas_snapshot_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5218,7 +5221,8 @@ def test_create_report_task_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_report_task in client._transport._wrapped_methods + client._transport.get_property_quotas_snapshot + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -5227,20 +5231,15 @@ def test_create_report_task_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.create_report_task + client._transport.get_property_quotas_snapshot ] = mock_rpc request = {} - client.create_report_task(request) + client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_report_task(request) + client.get_property_quotas_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5248,7 +5247,7 @@ def test_create_report_task_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_report_task_empty_call_async(): +async def test_get_property_quotas_snapshot_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataAsyncClient( @@ -5258,20 +5257,22 @@ async def test_create_report_task_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + analytics_data_api.PropertyQuotasSnapshot( + name="name_value", + ) ) - response = await client.create_report_task() + response = await client.get_property_quotas_snapshot() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.CreateReportTaskRequest() + assert args[0] == analytics_data_api.GetPropertyQuotasSnapshotRequest() @pytest.mark.asyncio -async def test_create_report_task_async_use_cached_wrapped_rpc( +async def test_get_property_quotas_snapshot_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5288,7 +5289,7 @@ async def test_create_report_task_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_report_task + client._client._transport.get_property_quotas_snapshot in client._client._transport._wrapped_methods ) @@ -5296,21 +5297,16 @@ async def test_create_report_task_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_report_task + client._client._transport.get_property_quotas_snapshot ] = mock_rpc request = {} - await client.create_report_task(request) + await client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_report_task(request) + await client.get_property_quotas_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5318,9 +5314,9 @@ async def test_create_report_task_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_report_task_async( +async def test_get_property_quotas_snapshot_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.CreateReportTaskRequest, + request_type=analytics_data_api.GetPropertyQuotasSnapshotRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5333,46 +5329,49 @@ async def test_create_report_task_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + analytics_data_api.PropertyQuotasSnapshot( + name="name_value", + ) ) - response = await client.create_report_task(request) + response = await client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) + assert response.name == "name_value" @pytest.mark.asyncio -async def test_create_report_task_async_from_dict(): - await test_create_report_task_async(request_type=dict) +async def test_get_property_quotas_snapshot_async_from_dict(): + await test_get_property_quotas_snapshot_async(request_type=dict) -def test_create_report_task_field_headers(): +def test_get_property_quotas_snapshot_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_report_task(request) + call.return_value = analytics_data_api.PropertyQuotasSnapshot() + client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5383,30 +5382,30 @@ def test_create_report_task_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_report_task_field_headers_async(): +async def test_get_property_quotas_snapshot_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + analytics_data_api.PropertyQuotasSnapshot() ) - await client.create_report_task(request) + await client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5417,41 +5416,37 @@ async def test_create_report_task_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_report_task_flattened(): +def test_get_property_quotas_snapshot_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = analytics_data_api.PropertyQuotasSnapshot() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_report_task( - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + client.get_property_quotas_snapshot( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].report_task - mock_val = analytics_data_api.ReportTask(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_report_task_flattened_error(): +def test_get_property_quotas_snapshot_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5459,50 +5454,45 @@ def test_create_report_task_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_report_task( - analytics_data_api.CreateReportTaskRequest(), - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + client.get_property_quotas_snapshot( + analytics_data_api.GetPropertyQuotasSnapshotRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_report_task_flattened_async(): +async def test_get_property_quotas_snapshot_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = analytics_data_api.PropertyQuotasSnapshot() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + analytics_data_api.PropertyQuotasSnapshot() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_report_task( - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + response = await client.get_property_quotas_snapshot( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].report_task - mock_val = analytics_data_api.ReportTask(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_report_task_flattened_error_async(): +async def test_get_property_quotas_snapshot_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5510,21 +5500,20 @@ async def test_create_report_task_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_report_task( - analytics_data_api.CreateReportTaskRequest(), - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + await client.get_property_quotas_snapshot( + analytics_data_api.GetPropertyQuotasSnapshotRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.QueryReportTaskRequest, + analytics_data_api.CreateReportTaskRequest, dict, ], ) -def test_query_report_task(request_type, transport: str = "grpc"): +def test_create_report_task(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5536,26 +5525,23 @@ def test_query_report_task(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.QueryReportTaskResponse( - row_count=992, - ) - response = client.query_report_task(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.CreateReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.QueryReportTaskResponse) - assert response.row_count == 992 + assert isinstance(response, future.Future) -def test_query_report_task_empty_call(): +def test_create_report_task_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( @@ -5565,18 +5551,18 @@ def test_query_report_task_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.query_report_task() + client.create_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.QueryReportTaskRequest() + assert args[0] == analytics_data_api.CreateReportTaskRequest() -def test_query_report_task_non_empty_request_with_auto_populated_field(): +def test_create_report_task_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -5587,26 +5573,26 @@ def test_query_report_task_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.QueryReportTaskRequest( - name="name_value", + request = analytics_data_api.CreateReportTaskRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.query_report_task(request=request) + client.create_report_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.QueryReportTaskRequest( - name="name_value", + assert args[0] == analytics_data_api.CreateReportTaskRequest( + parent="parent_value", ) -def test_query_report_task_use_cached_wrapped_rpc(): +def test_create_report_task_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5620,7 +5606,9 @@ def test_query_report_task_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.query_report_task in client._transport._wrapped_methods + assert ( + client._transport.create_report_task in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -5628,15 +5616,20 @@ def test_query_report_task_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.query_report_task + client._transport.create_report_task ] = mock_rpc request = {} - client.query_report_task(request) + client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.query_report_task(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5644,7 +5637,7 @@ def test_query_report_task_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_query_report_task_empty_call_async(): +async def test_create_report_task_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataAsyncClient( @@ -5654,22 +5647,20 @@ async def test_query_report_task_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.QueryReportTaskResponse( - row_count=992, - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.query_report_task() + response = await client.create_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.QueryReportTaskRequest() + assert args[0] == analytics_data_api.CreateReportTaskRequest() @pytest.mark.asyncio -async def test_query_report_task_async_use_cached_wrapped_rpc( +async def test_create_report_task_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5686,7 +5677,7 @@ async def test_query_report_task_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.query_report_task + client._client._transport.create_report_task in client._client._transport._wrapped_methods ) @@ -5694,16 +5685,21 @@ async def 
test_query_report_task_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.query_report_task + client._client._transport.create_report_task ] = mock_rpc request = {} - await client.query_report_task(request) + await client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.query_report_task(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5711,9 +5707,9 @@ async def test_query_report_task_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_query_report_task_async( +async def test_create_report_task_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.QueryReportTaskRequest, + request_type=analytics_data_api.CreateReportTaskRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5726,49 +5722,46 @@ async def test_query_report_task_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.QueryReportTaskResponse( - row_count=992, - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.query_report_task(request) + response = await client.create_report_task(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.CreateReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.QueryReportTaskResponse) - assert response.row_count == 992 + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_query_report_task_async_from_dict(): - await test_query_report_task_async(request_type=dict) +async def test_create_report_task_async_from_dict(): + await test_create_report_task_async(request_type=dict) -def test_query_report_task_field_headers(): +def test_create_report_task_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.CreateReportTaskRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: - call.return_value = analytics_data_api.QueryReportTaskResponse() - client.query_report_task(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_report_task(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -5779,30 +5772,30 @@ def test_query_report_task_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_query_report_task_field_headers_async(): +async def test_create_report_task_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.CreateReportTaskRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.QueryReportTaskResponse() + operations_pb2.Operation(name="operations/op") ) - await client.query_report_task(request) + await client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5813,37 +5806,41 @@ async def test_query_report_task_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_query_report_task_flattened(): +def test_create_report_task_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_data_api.QueryReportTaskResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.query_report_task( - name="name_value", + client.create_report_task( + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].report_task + mock_val = analytics_data_api.ReportTask(name="name_value") assert arg == mock_val -def test_query_report_task_flattened_error(): +def test_create_report_task_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5851,45 +5848,50 @@ def test_query_report_task_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.query_report_task( - analytics_data_api.QueryReportTaskRequest(), - name="name_value", + client.create_report_task( + analytics_data_api.CreateReportTaskRequest(), + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), ) @pytest.mark.asyncio -async def test_query_report_task_flattened_async(): +async def test_create_report_task_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_data_api.QueryReportTaskResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.QueryReportTaskResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.query_report_task( - name="name_value", + response = await client.create_report_task( + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].report_task + mock_val = analytics_data_api.ReportTask(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_query_report_task_flattened_error_async(): +async def test_create_report_task_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5897,20 +5899,21 @@ async def test_query_report_task_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.query_report_task( - analytics_data_api.QueryReportTaskRequest(), - name="name_value", + await client.create_report_task( + analytics_data_api.CreateReportTaskRequest(), + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetReportTaskRequest, + analytics_data_api.QueryReportTaskRequest, dict, ], ) -def test_get_report_task(request_type, transport: str = "grpc"): +def test_query_report_task(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5921,25 +5924,27 @@ def test_get_report_task(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ReportTask( - name="name_value", + call.return_value = analytics_data_api.QueryReportTaskResponse( + row_count=992, ) - response = client.get_report_task(request) + response = client.query_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.QueryReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.ReportTask) - assert response.name == "name_value" + assert isinstance(response, analytics_data_api.QueryReportTaskResponse) + assert response.row_count == 992 -def test_get_report_task_empty_call(): +def test_query_report_task_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( @@ -5948,17 +5953,19 @@ def test_get_report_task_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_report_task() + client.query_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.GetReportTaskRequest() + assert args[0] == analytics_data_api.QueryReportTaskRequest() -def test_get_report_task_non_empty_request_with_auto_populated_field(): +def test_query_report_task_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -5969,24 +5976,26 @@ def test_get_report_task_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.GetReportTaskRequest( + request = analytics_data_api.QueryReportTaskRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_report_task(request=request) + client.query_report_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.GetReportTaskRequest( + assert args[0] == analytics_data_api.QueryReportTaskRequest( name="name_value", ) -def test_get_report_task_use_cached_wrapped_rpc(): +def test_query_report_task_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6000,21 +6009,23 @@ def test_get_report_task_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_report_task in client._transport._wrapped_methods + assert client._transport.query_report_task in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_report_task] = mock_rpc + client._transport._wrapped_methods[ + client._transport.query_report_task + ] = mock_rpc request = {} - client.get_report_task(request) + client.query_report_task(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_report_task(request) + client.query_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6022,7 +6033,7 @@ def test_get_report_task_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_report_task_empty_call_async(): +async def test_query_report_task_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataAsyncClient( @@ -6031,21 +6042,23 @@ async def test_get_report_task_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ReportTask( - name="name_value", + analytics_data_api.QueryReportTaskResponse( + row_count=992, ) ) - response = await client.get_report_task() + response = await client.query_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.GetReportTaskRequest() + assert args[0] == analytics_data_api.QueryReportTaskRequest() @pytest.mark.asyncio -async def test_get_report_task_async_use_cached_wrapped_rpc( +async def test_query_report_task_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6062,7 +6075,7 @@ async def test_get_report_task_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_report_task + client._client._transport.query_report_task in client._client._transport._wrapped_methods ) @@ -6070,16 +6083,16 @@ async def 
test_get_report_task_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_report_task + client._client._transport.query_report_task ] = mock_rpc request = {} - await client.get_report_task(request) + await client.query_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_report_task(request) + await client.query_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6087,9 +6100,9 @@ async def test_get_report_task_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_report_task_async( +async def test_query_report_task_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.GetReportTaskRequest, + request_type=analytics_data_api.QueryReportTaskRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6101,46 +6114,50 @@ async def test_get_report_task_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ReportTask( - name="name_value", + analytics_data_api.QueryReportTaskResponse( + row_count=992, ) ) - response = await client.get_report_task(request) + response = await client.query_report_task(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.QueryReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.ReportTask) - assert response.name == "name_value" + assert isinstance(response, analytics_data_api.QueryReportTaskResponse) + assert response.row_count == 992 @pytest.mark.asyncio -async def test_get_report_task_async_from_dict(): - await test_get_report_task_async(request_type=dict) +async def test_query_report_task_async_from_dict(): + await test_query_report_task_async(request_type=dict) -def test_get_report_task_field_headers(): +def test_query_report_task_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.QueryReportTaskRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: - call.return_value = analytics_data_api.ReportTask() - client.get_report_task(request) + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: + call.return_value = analytics_data_api.QueryReportTaskResponse() + client.query_report_task(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -6156,23 +6173,25 @@ def test_get_report_task_field_headers(): @pytest.mark.asyncio -async def test_get_report_task_field_headers_async(): +async def test_query_report_task_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.QueryReportTaskRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ReportTask() + analytics_data_api.QueryReportTaskResponse() ) - await client.get_report_task(request) + await client.query_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6187,18 +6206,20 @@ async def test_get_report_task_field_headers_async(): ) in kw["metadata"] -def test_get_report_task_flattened(): +def test_query_report_task_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ReportTask() + call.return_value = analytics_data_api.QueryReportTaskResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_report_task( + client.query_report_task( name="name_value", ) @@ -6211,7 +6232,7 @@ def test_get_report_task_flattened(): assert arg == mock_val -def test_get_report_task_flattened_error(): +def test_query_report_task_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6219,29 +6240,31 @@ def test_get_report_task_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_report_task( - analytics_data_api.GetReportTaskRequest(), + client.query_report_task( + analytics_data_api.QueryReportTaskRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_report_task_flattened_async(): +async def test_query_report_task_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ReportTask() + call.return_value = analytics_data_api.QueryReportTaskResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ReportTask() + analytics_data_api.QueryReportTaskResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_report_task( + response = await client.query_report_task( name="name_value", ) @@ -6255,7 +6278,7 @@ async def test_get_report_task_flattened_async(): @pytest.mark.asyncio -async def test_get_report_task_flattened_error_async(): +async def test_query_report_task_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6263,8 +6286,8 @@ async def test_get_report_task_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_report_task( - analytics_data_api.GetReportTaskRequest(), + await client.query_report_task( + analytics_data_api.QueryReportTaskRequest(), name="name_value", ) @@ -6272,11 +6295,11 @@ async def test_get_report_task_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.ListReportTasksRequest, + analytics_data_api.GetReportTaskRequest, dict, ], ) -def test_list_report_tasks(request_type, transport: str = "grpc"): +def test_get_report_task(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6287,27 +6310,25 @@ def test_list_report_tasks(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_data_api.ListReportTasksResponse( - next_page_token="next_page_token_value", + call.return_value = analytics_data_api.ReportTask( + name="name_value", ) - response = client.list_report_tasks(request) + response = client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListReportTasksPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.ReportTask) + assert response.name == "name_value" -def test_list_report_tasks_empty_call(): +def test_get_report_task_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( @@ -6316,19 +6337,17 @@ def test_list_report_tasks_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_report_tasks() + client.get_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.ListReportTasksRequest() + assert args[0] == analytics_data_api.GetReportTaskRequest() -def test_list_report_tasks_non_empty_request_with_auto_populated_field(): +def test_get_report_task_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -6339,28 +6358,24 @@ def test_list_report_tasks_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.ListReportTasksRequest( - parent="parent_value", - page_token="page_token_value", + request = analytics_data_api.GetReportTaskRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_report_tasks(request=request) + client.get_report_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.ListReportTasksRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == analytics_data_api.GetReportTaskRequest( + name="name_value", ) -def test_list_report_tasks_use_cached_wrapped_rpc(): +def test_get_report_task_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6374,23 +6389,21 @@ def test_list_report_tasks_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_report_tasks in client._transport._wrapped_methods + assert client._transport.get_report_task in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_report_tasks - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_report_task] = mock_rpc request = {} - client.list_report_tasks(request) + client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_report_tasks(request) + client.get_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6398,7 +6411,7 @@ def test_list_report_tasks_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_report_tasks_empty_call_async(): +async def test_get_report_task_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AlphaAnalyticsDataAsyncClient( @@ -6407,23 +6420,21 @@ async def test_list_report_tasks_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListReportTasksResponse( - next_page_token="next_page_token_value", + analytics_data_api.ReportTask( + name="name_value", ) ) - response = await client.list_report_tasks() + response = await client.get_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.ListReportTasksRequest() + assert args[0] == analytics_data_api.GetReportTaskRequest() @pytest.mark.asyncio -async def test_list_report_tasks_async_use_cached_wrapped_rpc( +async def test_get_report_task_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6440,7 +6451,7 @@ async def test_list_report_tasks_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_report_tasks + client._client._transport.get_report_task in client._client._transport._wrapped_methods ) @@ -6448,16 +6459,16 @@ async def test_list_report_tasks_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_report_tasks + client._client._transport.get_report_task ] = mock_rpc request = {} - await client.list_report_tasks(request) + await client.get_report_task(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.list_report_tasks(request) + await client.get_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6465,9 +6476,9 @@ async def test_list_report_tasks_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_report_tasks_async( +async def test_get_report_task_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.ListReportTasksRequest, + request_type=analytics_data_api.GetReportTaskRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6479,50 +6490,46 @@ async def test_list_report_tasks_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListReportTasksResponse( - next_page_token="next_page_token_value", + analytics_data_api.ReportTask( + name="name_value", ) ) - response = await client.list_report_tasks(request) + response = await client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListReportTasksAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.ReportTask) + assert response.name == "name_value" @pytest.mark.asyncio -async def test_list_report_tasks_async_from_dict(): - await test_list_report_tasks_async(request_type=dict) +async def test_get_report_task_async_from_dict(): + await test_get_report_task_async(request_type=dict) -def test_list_report_tasks_field_headers(): +def test_get_report_task_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetReportTaskRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: - call.return_value = analytics_data_api.ListReportTasksResponse() - client.list_report_tasks(request) + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + call.return_value = analytics_data_api.ReportTask() + client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6533,30 +6540,28 @@ def test_list_report_tasks_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_report_tasks_field_headers_async(): +async def test_get_report_task_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetReportTaskRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListReportTasksResponse() + analytics_data_api.ReportTask() ) - await client.list_report_tasks(request) + await client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6567,37 +6572,35 @@ async def test_list_report_tasks_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_report_tasks_flattened(): +def test_get_report_task_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ListReportTasksResponse() + call.return_value = analytics_data_api.ReportTask() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_report_tasks( - parent="parent_value", + client.get_report_task( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_report_tasks_flattened_error(): +def test_get_report_task_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6605,45 +6608,43 @@ def test_list_report_tasks_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_report_tasks( - analytics_data_api.ListReportTasksRequest(), - parent="parent_value", + client.get_report_task( + analytics_data_api.GetReportTaskRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_report_tasks_flattened_async(): +async def test_get_report_task_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ListReportTasksResponse() + call.return_value = analytics_data_api.ReportTask() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListReportTasksResponse() + analytics_data_api.ReportTask() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_report_tasks( - parent="parent_value", + response = await client.get_report_task( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_report_tasks_flattened_error_async(): +async def test_get_report_task_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6651,111 +6652,499 @@ async def test_list_report_tasks_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_report_tasks( - analytics_data_api.ListReportTasksRequest(), - parent="parent_value", + await client.get_report_task( + analytics_data_api.GetReportTaskRequest(), + name="name_value", ) -def test_list_report_tasks_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.ListReportTasksRequest, + dict, + ], +) +def test_list_report_tasks(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_report_tasks), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - next_page_token="abc", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[], - next_page_token="def", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListReportTasksResponse( + next_page_token="next_page_token_value", ) - pager = client.list_report_tasks(request={}, retry=retry, timeout=timeout) + response = client.list_report_tasks(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.ListReportTasksRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analytics_data_api.ReportTask) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReportTasksPager) + assert response.next_page_token == "next_page_token_value" -def test_list_report_tasks_pages(transport_name: str = "grpc"): +def test_list_report_tasks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_report_tasks), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - next_page_token="abc", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[], - next_page_token="def", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - pages = list(client.list_report_tasks(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - + client.list_report_tasks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.ListReportTasksRequest() + + +def test_list_report_tasks_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = analytics_data_api.ListReportTasksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_report_tasks(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.ListReportTasksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_report_tasks_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_report_tasks in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_report_tasks + ] = mock_rpc + request = {} + client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_report_tasks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_report_tasks_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListReportTasksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_report_tasks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.ListReportTasksRequest() + + +@pytest.mark.asyncio +async def test_list_report_tasks_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_report_tasks + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_report_tasks + ] = mock_rpc + + request = {} + await client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_report_tasks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_report_tasks_async( + transport: str = "grpc_asyncio", + request_type=analytics_data_api.ListReportTasksRequest, +): + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListReportTasksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.ListReportTasksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReportTasksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_report_tasks_async_from_dict(): + await test_list_report_tasks_async(request_type=dict) + + +def test_list_report_tasks_field_headers(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = analytics_data_api.ListReportTasksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + call.return_value = analytics_data_api.ListReportTasksResponse() + client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_report_tasks_field_headers_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.ListReportTasksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListReportTasksResponse() + ) + await client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_report_tasks_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListReportTasksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_report_tasks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_report_tasks_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_report_tasks( + analytics_data_api.ListReportTasksRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_report_tasks_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListReportTasksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListReportTasksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_report_tasks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_report_tasks_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_report_tasks( + analytics_data_api.ListReportTasksRequest(), + parent="parent_value", + ) + + +def test_list_report_tasks_pager(transport_name: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + next_page_token="abc", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[], + next_page_token="def", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_report_tasks(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert 
len(results) == 6 + assert all(isinstance(i, analytics_data_api.ReportTask) for i in results) + + +def test_list_report_tasks_pages(transport_name: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + next_page_token="abc", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[], + next_page_token="def", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + ), + RuntimeError, + ) + pages = list(client.list_report_tasks(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + @pytest.mark.asyncio async def test_list_report_tasks_async_pager(): client = AlphaAnalyticsDataAsyncClient( @@ -6859,47 +7248,293 @@ async def test_list_report_tasks_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize( - "request_type", - [ - analytics_data_api.RunFunnelReportRequest, - dict, - ], -) -def test_run_funnel_report_rest(request_type): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.RunFunnelReportRequest, + dict, + ], +) +def test_run_funnel_report_rest(request_type): + client = AlphaAnalyticsDataClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"property": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.RunFunnelReportResponse( + kind="kind_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.RunFunnelReportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.run_funnel_report(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RunFunnelReportResponse) + assert response.kind == "kind_value" + + +def test_run_funnel_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_funnel_report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.run_funnel_report + ] = mock_rpc + + request = {} + client.run_funnel_report(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.run_funnel_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_funnel_report_rest_interceptors(null_interceptor): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlphaAnalyticsDataRestInterceptor(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, "post_run_funnel_report" + ) as post, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, "pre_run_funnel_report" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.RunFunnelReportRequest.pb( + analytics_data_api.RunFunnelReportRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = analytics_data_api.RunFunnelReportResponse.to_json( + analytics_data_api.RunFunnelReportResponse() + ) + + request = analytics_data_api.RunFunnelReportRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.RunFunnelReportResponse() + + client.run_funnel_report( + request, + metadata=[ + 
("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_funnel_report_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.RunFunnelReportRequest +): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"property": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.run_funnel_report(request) + + +def test_run_funnel_report_rest_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.CreateAudienceListRequest, + dict, + ], +) +def test_create_audience_list_rest(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1"} + request_init["audience_list"] = { + "name": "name_value", + "audience": "audience_value", + "audience_display_name": "audience_display_name_value", + "dimensions": [{"dimension_name": "dimension_name_value"}], + "state": 1, + "begin_creating_time": {"seconds": 751, "nanos": 543}, + "creation_quota_tokens_charged": 3070, + "row_count": 992, + "error_message": "error_message_value", + "percentage_completed": 0.2106, + "recurring_audience_list": "recurring_audience_list_value", + "webhook_notification": { + "uri": "uri_value", + "channel_token": 
"channel_token_value", + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_data_api.CreateAudienceListRequest.meta.fields[ + "audience_list" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] - # send a request that will satisfy transcoding - request_init = {"property": "properties/sample1"} + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["audience_list"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, 
subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["audience_list"][field])): + del request_init["audience_list"][field][i][subfield] + else: + del request_init["audience_list"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.RunFunnelReportResponse( - kind="kind_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_data_api.RunFunnelReportResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.run_funnel_report(request) + response = client.create_audience_list(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.RunFunnelReportResponse) - assert response.kind == "kind_value" + assert response.operation.name == "operations/spam" -def test_run_funnel_report_rest_use_cached_wrapped_rpc(): +def test_create_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6913,7 +7548,9 @@ def test_run_funnel_report_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.run_funnel_report in client._transport._wrapped_methods + assert ( + client._transport.create_audience_list in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -6921,24 +7558,117 @@ def test_run_funnel_report_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.run_funnel_report + client._transport.create_audience_list ] = mock_rpc request = {} - client.run_funnel_report(request) + client.create_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.run_funnel_report(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +def test_create_audience_list_rest_required_fields( + request_type=analytics_data_api.CreateAudienceListRequest, +): + transport_class = transports.AlphaAnalyticsDataRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_audience_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_audience_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_audience_list(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_audience_list_rest_unset_required_fields(): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_audience_list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "audienceList", + ) + ) + ) + + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_funnel_report_rest_interceptors(null_interceptor): +def test_create_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6951,14 +7681,16 @@ def test_run_funnel_report_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, 
"post_run_funnel_report" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, "post_create_audience_list" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_run_funnel_report" + transports.AlphaAnalyticsDataRestInterceptor, "pre_create_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.RunFunnelReportRequest.pb( - analytics_data_api.RunFunnelReportRequest() + pb_message = analytics_data_api.CreateAudienceListRequest.pb( + analytics_data_api.CreateAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -6970,55 +7702,111 @@ def test_run_funnel_report_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_data_api.RunFunnelReportResponse.to_json( - analytics_data_api.RunFunnelReportResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = analytics_data_api.RunFunnelReportRequest() + request = analytics_data_api.CreateAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.RunFunnelReportResponse() + post.return_value = operations_pb2.Operation() + + client.create_audience_list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_audience_list_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.CreateAudienceListRequest +): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1"} + request = request_type(**request_init) + + # Mock 
the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_audience_list(request) + + +def test_create_audience_list_rest_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "properties/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + audience_list=analytics_data_api.AudienceList(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.run_funnel_report( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) + client.create_audience_list(**mock_args) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=properties/*}/audienceLists" % client.transport._host, + args[1], + ) -def test_run_funnel_report_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.RunFunnelReportRequest -): +def test_create_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"property": "properties/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.run_funnel_report(request) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_audience_list( + analytics_data_api.CreateAudienceListRequest(), + parent="parent_value", + audience_list=analytics_data_api.AudienceList(name="name_value"), + ) -def test_run_funnel_report_rest_error(): +def test_create_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7027,125 +7815,44 @@ def test_run_funnel_report_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.CreateAudienceListRequest, + analytics_data_api.QueryAudienceListRequest, dict, ], ) -def test_create_audience_list_rest(request_type): +def test_query_audience_list_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} - request_init["audience_list"] = { - "name": "name_value", - "audience": "audience_value", - "audience_display_name": "audience_display_name_value", - "dimensions": [{"dimension_name": "dimension_name_value"}], - "state": 1, - "begin_creating_time": {"seconds": 751, "nanos": 543}, - "creation_quota_tokens_charged": 3070, - "row_count": 992, - "error_message": "error_message_value", - "percentage_completed": 0.2106, - "recurring_audience_list": "recurring_audience_list_value", - "webhook_notification": { - "uri": "uri_value", - "channel_token": "channel_token_value", - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_data_api.CreateAudienceListRequest.meta.fields[ - "audience_list" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["audience_list"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime 
version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["audience_list"][field])): - del request_init["audience_list"][field][i][subfield] - else: - del request_init["audience_list"][field][subfield] + request_init = {"name": "properties/sample1/audienceLists/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = analytics_data_api.QueryAudienceListResponse( + row_count=992, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_audience_list(request) + response = client.query_audience_list(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, analytics_data_api.QueryAudienceListResponse) + assert response.row_count == 992 -def test_create_audience_list_rest_use_cached_wrapped_rpc(): +def test_query_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7160,7 +7867,7 @@ def test_create_audience_list_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_audience_list in client._transport._wrapped_methods + client._transport.query_audience_list in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -7169,33 +7876,29 @@ def test_create_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_audience_list + client._transport.query_audience_list ] = mock_rpc request = {} - client.create_audience_list(request) + client.query_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_audience_list(request) + client.query_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_audience_list_rest_required_fields( - request_type=analytics_data_api.CreateAudienceListRequest, +def test_query_audience_list_rest_required_fields( + request_type=analytics_data_api.QueryAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7206,21 +7909,21 @@ def test_create_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_audience_list._get_unset_required_fields(jsonified_request) + ).query_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_audience_list._get_unset_required_fields(jsonified_request) + ).query_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7229,7 +7932,7 @@ def 
test_create_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = analytics_data_api.QueryAudienceListResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7249,37 +7952,32 @@ def test_create_audience_list_rest_required_fields( response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_audience_list(request) + response = client.query_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_audience_list_rest_unset_required_fields(): +def test_query_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_audience_list._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "audienceList", - ) - ) - ) + unset_fields = transport.query_audience_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_audience_list_rest_interceptors(null_interceptor): +def test_query_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7292,16 +7990,14 @@ def 
test_create_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_create_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "post_query_audience_list" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_create_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "pre_query_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.CreateAudienceListRequest.pb( - analytics_data_api.CreateAudienceListRequest() + pb_message = analytics_data_api.QueryAudienceListRequest.pb( + analytics_data_api.QueryAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -7313,19 +8009,21 @@ def test_create_audience_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + analytics_data_api.QueryAudienceListResponse.to_json( + analytics_data_api.QueryAudienceListResponse() + ) ) - request = analytics_data_api.CreateAudienceListRequest() + request = analytics_data_api.QueryAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = analytics_data_api.QueryAudienceListResponse() - client.create_audience_list( + client.query_audience_list( request, metadata=[ ("key", "val"), @@ -7337,8 +8035,8 @@ def test_create_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_audience_list_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.CreateAudienceListRequest +def 
test_query_audience_list_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.QueryAudienceListRequest ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7346,7 +8044,7 @@ def test_create_audience_list_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/audienceLists/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7358,10 +8056,10 @@ def test_create_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_audience_list(request) + client.query_audience_list(request) -def test_create_audience_list_rest_flattened(): +def test_query_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7370,38 +8068,40 @@ def test_create_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = analytics_data_api.QueryAudienceListResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"name": "properties/sample1/audienceLists/sample2"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - audience_list=analytics_data_api.AudienceList(name="name_value"), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_audience_list(**mock_args) + client.query_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/audienceLists" % client.transport._host, + "%s/v1alpha/{name=properties/*/audienceLists/*}:query" + % client.transport._host, args[1], ) -def test_create_audience_list_rest_flattened_error(transport: str = "rest"): +def test_query_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7410,14 +8110,13 @@ def test_create_audience_list_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_audience_list( - analytics_data_api.CreateAudienceListRequest(), - parent="parent_value", - audience_list=analytics_data_api.AudienceList(name="name_value"), + client.query_audience_list( + analytics_data_api.QueryAudienceListRequest(), + name="name_value", ) -def test_create_audience_list_rest_error(): +def test_query_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7426,11 +8125,11 @@ def test_create_audience_list_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.QueryAudienceListRequest, + analytics_data_api.SheetExportAudienceListRequest, dict, ], ) -def test_query_audience_list_rest(request_type): +def test_sheet_export_audience_list_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7443,7 +8142,9 @@ def test_query_audience_list_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.QueryAudienceListResponse( + return_value = analytics_data_api.SheetExportAudienceListResponse( + spreadsheet_uri="spreadsheet_uri_value", + spreadsheet_id="spreadsheet_id_value", row_count=992, ) @@ -7451,19 +8152,23 @@ def test_query_audience_list_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) + return_value = analytics_data_api.SheetExportAudienceListResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.query_audience_list(request) + response = client.sheet_export_audience_list(request) # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.QueryAudienceListResponse) + assert isinstance(response, analytics_data_api.SheetExportAudienceListResponse) + assert response.spreadsheet_uri == "spreadsheet_uri_value" + assert response.spreadsheet_id == "spreadsheet_id_value" assert response.row_count == 992 -def test_query_audience_list_rest_use_cached_wrapped_rpc(): +def test_sheet_export_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7478,7 +8183,8 @@ def test_query_audience_list_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.query_audience_list in client._transport._wrapped_methods + client._transport.sheet_export_audience_list + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -7487,24 +8193,24 @@ def test_query_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute 
client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.query_audience_list + client._transport.sheet_export_audience_list ] = mock_rpc request = {} - client.query_audience_list(request) + client.sheet_export_audience_list(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.query_audience_list(request) + client.sheet_export_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_query_audience_list_rest_required_fields( - request_type=analytics_data_api.QueryAudienceListRequest, +def test_sheet_export_audience_list_rest_required_fields( + request_type=analytics_data_api.SheetExportAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport @@ -7520,7 +8226,7 @@ def test_query_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).query_audience_list._get_unset_required_fields(jsonified_request) + ).sheet_export_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -7529,7 +8235,7 @@ def test_query_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).query_audience_list._get_unset_required_fields(jsonified_request) + ).sheet_export_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -7543,7 +8249,7 @@ def test_query_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.QueryAudienceListResponse() + return_value = analytics_data_api.SheetExportAudienceListResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7565,30 +8271,32 @@ def test_query_audience_list_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) + return_value = analytics_data_api.SheetExportAudienceListResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.query_audience_list(request) + response = client.sheet_export_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_query_audience_list_rest_unset_required_fields(): +def test_sheet_export_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.query_audience_list._get_unset_required_fields({}) + unset_fields = transport.sheet_export_audience_list._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_query_audience_list_rest_interceptors(null_interceptor): +def test_sheet_export_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7601,14 +8309,14 @@ def test_query_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - 
transports.AlphaAnalyticsDataRestInterceptor, "post_query_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "post_sheet_export_audience_list" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_query_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "pre_sheet_export_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.QueryAudienceListRequest.pb( - analytics_data_api.QueryAudienceListRequest() + pb_message = analytics_data_api.SheetExportAudienceListRequest.pb( + analytics_data_api.SheetExportAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -7621,20 +8329,20 @@ def test_query_audience_list_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() req.return_value._content = ( - analytics_data_api.QueryAudienceListResponse.to_json( - analytics_data_api.QueryAudienceListResponse() + analytics_data_api.SheetExportAudienceListResponse.to_json( + analytics_data_api.SheetExportAudienceListResponse() ) ) - request = analytics_data_api.QueryAudienceListRequest() + request = analytics_data_api.SheetExportAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.QueryAudienceListResponse() + post.return_value = analytics_data_api.SheetExportAudienceListResponse() - client.query_audience_list( + client.sheet_export_audience_list( request, metadata=[ ("key", "val"), @@ -7646,8 +8354,9 @@ def test_query_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_query_audience_list_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.QueryAudienceListRequest +def test_sheet_export_audience_list_rest_bad_request( + transport: str = "rest", + request_type=analytics_data_api.SheetExportAudienceListRequest, ): client = AlphaAnalyticsDataClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -7667,10 +8376,10 @@ def test_query_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.query_audience_list(request) + client.sheet_export_audience_list(request) -def test_query_audience_list_rest_flattened(): +def test_sheet_export_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7679,7 +8388,7 @@ def test_query_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.QueryAudienceListResponse() + return_value = analytics_data_api.SheetExportAudienceListResponse() # get arguments that satisfy an http rule for this method sample_request = {"name": "properties/sample1/audienceLists/sample2"} @@ -7694,25 +8403,27 @@ def test_query_audience_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) + return_value = analytics_data_api.SheetExportAudienceListResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.query_audience_list(**mock_args) + client.sheet_export_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/audienceLists/*}:query" + "%s/v1alpha/{name=properties/*/audienceLists/*}:exportSheet" % client.transport._host, args[1], ) -def test_query_audience_list_rest_flattened_error(transport: str = "rest"): +def test_sheet_export_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7721,13 +8432,13 @@ def test_query_audience_list_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.query_audience_list( - analytics_data_api.QueryAudienceListRequest(), + client.sheet_export_audience_list( + analytics_data_api.SheetExportAudienceListRequest(), name="name_value", ) -def test_query_audience_list_rest_error(): +def test_sheet_export_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7736,11 +8447,11 @@ def test_query_audience_list_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.SheetExportAudienceListRequest, + analytics_data_api.GetAudienceListRequest, dict, ], ) -def test_sheet_export_audience_list_rest(request_type): +def test_get_audience_list_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7753,33 +8464,43 @@ def test_sheet_export_audience_list_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.SheetExportAudienceListResponse( - spreadsheet_uri="spreadsheet_uri_value", - spreadsheet_id="spreadsheet_id_value", + return_value = analytics_data_api.AudienceList( + name="name_value", + audience="audience_value", + audience_display_name="audience_display_name_value", + state=analytics_data_api.AudienceList.State.CREATING, + creation_quota_tokens_charged=3070, row_count=992, + error_message="error_message_value", + percentage_completed=0.2106, + recurring_audience_list="recurring_audience_list_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.SheetExportAudienceListResponse.pb( - return_value - ) + return_value = analytics_data_api.AudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.sheet_export_audience_list(request) + response = client.get_audience_list(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.SheetExportAudienceListResponse) - assert response.spreadsheet_uri == "spreadsheet_uri_value" - assert response.spreadsheet_id == "spreadsheet_id_value" + assert isinstance(response, analytics_data_api.AudienceList) + assert response.name == "name_value" + assert response.audience == "audience_value" + assert response.audience_display_name == "audience_display_name_value" + assert response.state == analytics_data_api.AudienceList.State.CREATING + assert response.creation_quota_tokens_charged == 3070 assert response.row_count == 992 + assert response.error_message == "error_message_value" + assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) + assert response.recurring_audience_list == "recurring_audience_list_value" -def test_sheet_export_audience_list_rest_use_cached_wrapped_rpc(): +def test_get_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7793,10 +8514,7 @@ def test_sheet_export_audience_list_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.sheet_export_audience_list - in client._transport._wrapped_methods - ) + assert client._transport.get_audience_list in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -7804,24 +8522,24 @@ def test_sheet_export_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.sheet_export_audience_list + client._transport.get_audience_list ] = mock_rpc request = {} - client.sheet_export_audience_list(request) + client.get_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.sheet_export_audience_list(request) + client.get_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_sheet_export_audience_list_rest_required_fields( - request_type=analytics_data_api.SheetExportAudienceListRequest, +def test_get_audience_list_rest_required_fields( + request_type=analytics_data_api.GetAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport @@ -7837,7 +8555,7 @@ def test_sheet_export_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).sheet_export_audience_list._get_unset_required_fields(jsonified_request) + ).get_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -7846,7 +8564,7 @@ def test_sheet_export_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).sheet_export_audience_list._get_unset_required_fields(jsonified_request) + ).get_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -7860,7 +8578,7 @@ def test_sheet_export_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.SheetExportAudienceListResponse() + return_value = analytics_data_api.AudienceList() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7872,42 +8590,39 @@ def test_sheet_export_audience_list_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.SheetExportAudienceListResponse.pb( - return_value - ) + return_value = analytics_data_api.AudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.sheet_export_audience_list(request) + response = client.get_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_sheet_export_audience_list_rest_unset_required_fields(): +def test_get_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.sheet_export_audience_list._get_unset_required_fields({}) + unset_fields = transport.get_audience_list._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_sheet_export_audience_list_rest_interceptors(null_interceptor): +def test_get_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7920,14 +8635,14 @@ def test_sheet_export_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( 
path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_sheet_export_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "post_get_audience_list" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_sheet_export_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "pre_get_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.SheetExportAudienceListRequest.pb( - analytics_data_api.SheetExportAudienceListRequest() + pb_message = analytics_data_api.GetAudienceListRequest.pb( + analytics_data_api.GetAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -7939,21 +8654,19 @@ def test_sheet_export_audience_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_data_api.SheetExportAudienceListResponse.to_json( - analytics_data_api.SheetExportAudienceListResponse() - ) + req.return_value._content = analytics_data_api.AudienceList.to_json( + analytics_data_api.AudienceList() ) - request = analytics_data_api.SheetExportAudienceListRequest() + request = analytics_data_api.GetAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.SheetExportAudienceListResponse() + post.return_value = analytics_data_api.AudienceList() - client.sheet_export_audience_list( + client.get_audience_list( request, metadata=[ ("key", "val"), @@ -7965,9 +8678,8 @@ def test_sheet_export_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_sheet_export_audience_list_rest_bad_request( - transport: str = "rest", - request_type=analytics_data_api.SheetExportAudienceListRequest, +def test_get_audience_list_rest_bad_request( + transport: str = "rest", 
request_type=analytics_data_api.GetAudienceListRequest ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7987,10 +8699,10 @@ def test_sheet_export_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.sheet_export_audience_list(request) + client.get_audience_list(request) -def test_sheet_export_audience_list_rest_flattened(): +def test_get_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7999,7 +8711,7 @@ def test_sheet_export_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.SheetExportAudienceListResponse() + return_value = analytics_data_api.AudienceList() # get arguments that satisfy an http rule for this method sample_request = {"name": "properties/sample1/audienceLists/sample2"} @@ -8014,27 +8726,24 @@ def test_sheet_export_audience_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.SheetExportAudienceListResponse.pb( - return_value - ) + return_value = analytics_data_api.AudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.sheet_export_audience_list(**mock_args) + client.get_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/audienceLists/*}:exportSheet" - % client.transport._host, + "%s/v1alpha/{name=properties/*/audienceLists/*}" % client.transport._host, args[1], ) -def test_sheet_export_audience_list_rest_flattened_error(transport: str = "rest"): +def test_get_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8043,13 +8752,13 @@ def test_sheet_export_audience_list_rest_flattened_error(transport: str = "rest" # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.sheet_export_audience_list( - analytics_data_api.SheetExportAudienceListRequest(), + client.get_audience_list( + analytics_data_api.GetAudienceListRequest(), name="name_value", ) -def test_sheet_export_audience_list_rest_error(): +def test_get_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8058,60 +8767,44 @@ def test_sheet_export_audience_list_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetAudienceListRequest, + analytics_data_api.ListAudienceListsRequest, dict, ], ) -def test_get_audience_list_rest(request_type): +def test_list_audience_lists_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/audienceLists/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.AudienceList( - name="name_value", - audience="audience_value", - audience_display_name="audience_display_name_value", - state=analytics_data_api.AudienceList.State.CREATING, - creation_quota_tokens_charged=3070, - row_count=992, - error_message="error_message_value", - percentage_completed=0.2106, - recurring_audience_list="recurring_audience_list_value", + return_value = analytics_data_api.ListAudienceListsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.AudienceList.pb(return_value) + return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_audience_list(request) + response = client.list_audience_lists(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.AudienceList) - assert response.name == "name_value" - assert response.audience == "audience_value" - assert response.audience_display_name == "audience_display_name_value" - assert response.state == analytics_data_api.AudienceList.State.CREATING - assert response.creation_quota_tokens_charged == 3070 - assert response.row_count == 992 - assert response.error_message == "error_message_value" - assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) - assert response.recurring_audience_list == "recurring_audience_list_value" + assert isinstance(response, pagers.ListAudienceListsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_audience_list_rest_use_cached_wrapped_rpc(): +def test_list_audience_lists_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8125,7 +8818,9 @@ def test_get_audience_list_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_audience_list in client._transport._wrapped_methods + assert ( + client._transport.list_audience_lists in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -8133,29 +8828,29 @@ def test_get_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_audience_list + client._transport.list_audience_lists ] = mock_rpc request = {} - client.get_audience_list(request) + client.list_audience_lists(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_audience_list(request) + client.list_audience_lists(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_audience_list_rest_required_fields( - request_type=analytics_data_api.GetAudienceListRequest, +def test_list_audience_lists_rest_required_fields( + request_type=analytics_data_api.ListAudienceListsRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8166,21 +8861,28 @@ def test_get_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_audience_list._get_unset_required_fields(jsonified_request) + ).list_audience_lists._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_audience_list._get_unset_required_fields(jsonified_request) + ).list_audience_lists._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8189,7 +8891,7 @@ def test_get_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.AudienceList() + return_value = analytics_data_api.ListAudienceListsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8210,30 +8912,38 @@ def test_get_audience_list_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.AudienceList.pb(return_value) + return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_audience_list(request) + response = client.list_audience_lists(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_audience_list_rest_unset_required_fields(): +def test_list_audience_lists_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_audience_list._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = 
transport.list_audience_lists._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_audience_list_rest_interceptors(null_interceptor): +def test_list_audience_lists_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -8246,14 +8956,14 @@ def test_get_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_get_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "post_list_audience_lists" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_get_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "pre_list_audience_lists" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.GetAudienceListRequest.pb( - analytics_data_api.GetAudienceListRequest() + pb_message = analytics_data_api.ListAudienceListsRequest.pb( + analytics_data_api.ListAudienceListsRequest() ) transcode.return_value = { "method": "post", @@ -8265,19 +8975,21 @@ def test_get_audience_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_data_api.AudienceList.to_json( - analytics_data_api.AudienceList() + req.return_value._content = ( + analytics_data_api.ListAudienceListsResponse.to_json( + analytics_data_api.ListAudienceListsResponse() + ) ) - request = analytics_data_api.GetAudienceListRequest() + request = analytics_data_api.ListAudienceListsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = 
analytics_data_api.AudienceList() + post.return_value = analytics_data_api.ListAudienceListsResponse() - client.get_audience_list( + client.list_audience_lists( request, metadata=[ ("key", "val"), @@ -8289,8 +9001,8 @@ def test_get_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_audience_list_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.GetAudienceListRequest +def test_list_audience_lists_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.ListAudienceListsRequest ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8298,7 +9010,7 @@ def test_get_audience_list_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/audienceLists/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8310,10 +9022,10 @@ def test_get_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_audience_list(request) + client.list_audience_lists(request) -def test_get_audience_list_rest_flattened(): +def test_list_audience_lists_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8322,14 +9034,14 @@ def test_get_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.AudienceList() + return_value = analytics_data_api.ListAudienceListsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/audienceLists/sample2"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -8337,24 +9049,24 @@ def test_get_audience_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.AudienceList.pb(return_value) + return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_audience_list(**mock_args) + client.list_audience_lists(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/audienceLists/*}" % client.transport._host, + "%s/v1alpha/{parent=properties/*}/audienceLists" % client.transport._host, args[1], ) -def test_get_audience_list_rest_flattened_error(transport: str = "rest"): +def test_list_audience_lists_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8363,26 +9075,83 @@ def test_get_audience_list_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_audience_list( - analytics_data_api.GetAudienceListRequest(), - name="name_value", + client.list_audience_lists( + analytics_data_api.ListAudienceListsRequest(), + parent="parent_value", ) -def test_get_audience_list_rest_error(): +def test_list_audience_lists_rest_pager(transport: str = "rest"): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + next_page_token="abc", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[], + next_page_token="def", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_data_api.ListAudienceListsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = 
client.list_audience_lists(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, analytics_data_api.AudienceList) for i in results) + + pages = list(client.list_audience_lists(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_data_api.ListAudienceListsRequest, + analytics_data_api.CreateRecurringAudienceListRequest, dict, ], ) -def test_list_audience_lists_rest(request_type): +def test_create_recurring_audience_list_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8390,32 +9159,123 @@ def test_list_audience_lists_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "properties/sample1"} + request_init["recurring_audience_list"] = { + "name": "name_value", + "audience": "audience_value", + "audience_display_name": "audience_display_name_value", + "dimensions": [{"dimension_name": "dimension_name_value"}], + "active_days_remaining": 2213, + "audience_lists": ["audience_lists_value1", "audience_lists_value2"], + "webhook_notification": { + "uri": "uri_value", + "channel_token": "channel_token_value", + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_data_api.CreateRecurringAudienceListRequest.meta.fields[ + "recurring_audience_list" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "recurring_audience_list" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["recurring_audience_list"][field])): + del 
request_init["recurring_audience_list"][field][i][subfield] + else: + del request_init["recurring_audience_list"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListAudienceListsResponse( - next_page_token="next_page_token_value", + return_value = analytics_data_api.RecurringAudienceList( + name="name_value", + audience="audience_value", + audience_display_name="audience_display_name_value", + active_days_remaining=2213, + audience_lists=["audience_lists_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) + return_value = analytics_data_api.RecurringAudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_audience_lists(request) + response = client.create_recurring_audience_list(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAudienceListsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.RecurringAudienceList) + assert response.name == "name_value" + assert response.audience == "audience_value" + assert response.audience_display_name == "audience_display_name_value" + assert response.active_days_remaining == 2213 + assert response.audience_lists == ["audience_lists_value"] -def test_list_audience_lists_rest_use_cached_wrapped_rpc(): +def test_create_recurring_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8430,7 +9290,8 @@ def test_list_audience_lists_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_audience_lists in client._transport._wrapped_methods + client._transport.create_recurring_audience_list + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -8439,24 +9300,24 @@ def test_list_audience_lists_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_audience_lists + client._transport.create_recurring_audience_list ] = mock_rpc request = {} - client.list_audience_lists(request) + client.create_recurring_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_audience_lists(request) + client.create_recurring_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_audience_lists_rest_required_fields( - request_type=analytics_data_api.ListAudienceListsRequest, +def test_create_recurring_audience_list_rest_required_fields( + request_type=analytics_data_api.CreateRecurringAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport @@ -8472,7 +9333,7 @@ def test_list_audience_lists_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_audience_lists._get_unset_required_fields(jsonified_request) + ).create_recurring_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -8481,14 +9342,7 @@ def test_list_audience_lists_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_audience_lists._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).create_recurring_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -8502,7 +9356,7 @@ def test_list_audience_lists_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListAudienceListsResponse() + return_value = analytics_data_api.RecurringAudienceList() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8514,47 +9368,50 @@ def test_list_audience_lists_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) + return_value = analytics_data_api.RecurringAudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_audience_lists(request) + response = client.create_recurring_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_audience_lists_rest_unset_required_fields(): +def test_create_recurring_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_audience_lists._get_unset_required_fields({}) + unset_fields = transport.create_recurring_audience_list._get_unset_required_fields( + {} + ) assert set(unset_fields) == ( - set( + set(()) + & set( ( - "pageSize", - "pageToken", + "parent", + "recurringAudienceList", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_audience_lists_rest_interceptors(null_interceptor): +def test_create_recurring_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -8567,14 +9424,16 
@@ def test_list_audience_lists_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_list_audience_lists" + transports.AlphaAnalyticsDataRestInterceptor, + "post_create_recurring_audience_list", ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_list_audience_lists" + transports.AlphaAnalyticsDataRestInterceptor, + "pre_create_recurring_audience_list", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.ListAudienceListsRequest.pb( - analytics_data_api.ListAudienceListsRequest() + pb_message = analytics_data_api.CreateRecurringAudienceListRequest.pb( + analytics_data_api.CreateRecurringAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -8586,21 +9445,19 @@ def test_list_audience_lists_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_data_api.ListAudienceListsResponse.to_json( - analytics_data_api.ListAudienceListsResponse() - ) + req.return_value._content = analytics_data_api.RecurringAudienceList.to_json( + analytics_data_api.RecurringAudienceList() ) - request = analytics_data_api.ListAudienceListsRequest() + request = analytics_data_api.CreateRecurringAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.ListAudienceListsResponse() + post.return_value = analytics_data_api.RecurringAudienceList() - client.list_audience_lists( + client.create_recurring_audience_list( request, metadata=[ ("key", "val"), @@ -8612,8 +9469,9 @@ def test_list_audience_lists_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_audience_lists_rest_bad_request( - transport: str = "rest", 
request_type=analytics_data_api.ListAudienceListsRequest +def test_create_recurring_audience_list_rest_bad_request( + transport: str = "rest", + request_type=analytics_data_api.CreateRecurringAudienceListRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8633,10 +9491,10 @@ def test_list_audience_lists_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_audience_lists(request) + client.create_recurring_audience_list(request) -def test_list_audience_lists_rest_flattened(): +def test_create_recurring_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8645,7 +9503,7 @@ def test_list_audience_lists_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.ListAudienceListsResponse() + return_value = analytics_data_api.RecurringAudienceList() # get arguments that satisfy an http rule for this method sample_request = {"parent": "properties/sample1"} @@ -8653,6 +9511,9 @@ def test_list_audience_lists_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + recurring_audience_list=analytics_data_api.RecurringAudienceList( + name="name_value" + ), ) mock_args.update(sample_request) @@ -8660,24 +9521,25 @@ def test_list_audience_lists_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) + return_value = analytics_data_api.RecurringAudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_audience_lists(**mock_args) + client.create_recurring_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/audienceLists" % client.transport._host, + "%s/v1alpha/{parent=properties/*}/recurringAudienceLists" + % client.transport._host, args[1], ) -def test_list_audience_lists_rest_flattened_error(transport: str = "rest"): +def test_create_recurring_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8686,173 +9548,36 @@ def test_list_audience_lists_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_audience_lists( - analytics_data_api.ListAudienceListsRequest(), + client.create_recurring_audience_list( + analytics_data_api.CreateRecurringAudienceListRequest(), parent="parent_value", - ) - - -def test_list_audience_lists_rest_pager(transport: str = "rest"): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - ], - next_page_token="abc", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[], - next_page_token="def", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - ], + recurring_audience_list=analytics_data_api.RecurringAudienceList( + name="name_value" ), ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_data_api.ListAudienceListsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1"} - - pager = 
client.list_audience_lists(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analytics_data_api.AudienceList) for i in results) - - pages = list(client.list_audience_lists(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - analytics_data_api.CreateRecurringAudienceListRequest, - dict, - ], -) -def test_create_recurring_audience_list_rest(request_type): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} - request_init["recurring_audience_list"] = { - "name": "name_value", - "audience": "audience_value", - "audience_display_name": "audience_display_name_value", - "dimensions": [{"dimension_name": "dimension_name_value"}], - "active_days_remaining": 2213, - "audience_lists": ["audience_lists_value1", "audience_lists_value2"], - "webhook_notification": { - "uri": "uri_value", - "channel_token": "channel_token_value", - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_data_api.CreateRecurringAudienceListRequest.meta.fields[ - "recurring_audience_list" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "recurring_audience_list" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value +def test_create_recurring_audience_list_rest_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = 
subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["recurring_audience_list"][field])): - del request_init["recurring_audience_list"][field][i][subfield] - else: - del request_init["recurring_audience_list"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.GetRecurringAudienceListRequest, + dict, + ], +) +def test_get_recurring_audience_list_rest(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1/recurringAudienceLists/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -8875,7 +9600,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_recurring_audience_list(request) + response = client.get_recurring_audience_list(request) # Establish that the response is the type that we expect. 
assert isinstance(response, analytics_data_api.RecurringAudienceList) @@ -8886,7 +9611,7 @@ def get_message_fields(field): assert response.audience_lists == ["audience_lists_value"] -def test_create_recurring_audience_list_rest_use_cached_wrapped_rpc(): +def test_get_recurring_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8901,7 +9626,7 @@ def test_create_recurring_audience_list_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_recurring_audience_list + client._transport.get_recurring_audience_list in client._transport._wrapped_methods ) @@ -8911,29 +9636,29 @@ def test_create_recurring_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_recurring_audience_list + client._transport.get_recurring_audience_list ] = mock_rpc request = {} - client.create_recurring_audience_list(request) + client.get_recurring_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_recurring_audience_list(request) + client.get_recurring_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_recurring_audience_list_rest_required_fields( - request_type=analytics_data_api.CreateRecurringAudienceListRequest, +def test_get_recurring_audience_list_rest_required_fields( + request_type=analytics_data_api.GetRecurringAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8944,21 +9669,21 @@ def test_create_recurring_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_recurring_audience_list._get_unset_required_fields(jsonified_request) + ).get_recurring_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_recurring_audience_list._get_unset_required_fields(jsonified_request) + ).get_recurring_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8979,10 +9704,9 @@ def test_create_recurring_audience_list_rest_required_fields( pb_request = 
request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -8995,34 +9719,24 @@ def test_create_recurring_audience_list_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_recurring_audience_list(request) + response = client.get_recurring_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_recurring_audience_list_rest_unset_required_fields(): +def test_get_recurring_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_recurring_audience_list._get_unset_required_fields( - {} - ) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "recurringAudienceList", - ) - ) - ) + unset_fields = transport.get_recurring_audience_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_recurring_audience_list_rest_interceptors(null_interceptor): +def test_get_recurring_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9035,16 +9749,14 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, - "post_create_recurring_audience_list", + transports.AlphaAnalyticsDataRestInterceptor, "post_get_recurring_audience_list" ) as post, 
mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, - "pre_create_recurring_audience_list", + transports.AlphaAnalyticsDataRestInterceptor, "pre_get_recurring_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.CreateRecurringAudienceListRequest.pb( - analytics_data_api.CreateRecurringAudienceListRequest() + pb_message = analytics_data_api.GetRecurringAudienceListRequest.pb( + analytics_data_api.GetRecurringAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -9060,7 +9772,7 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): analytics_data_api.RecurringAudienceList() ) - request = analytics_data_api.CreateRecurringAudienceListRequest() + request = analytics_data_api.GetRecurringAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -9068,7 +9780,7 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = analytics_data_api.RecurringAudienceList() - client.create_recurring_audience_list( + client.get_recurring_audience_list( request, metadata=[ ("key", "val"), @@ -9080,9 +9792,9 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_recurring_audience_list_rest_bad_request( +def test_get_recurring_audience_list_rest_bad_request( transport: str = "rest", - request_type=analytics_data_api.CreateRecurringAudienceListRequest, + request_type=analytics_data_api.GetRecurringAudienceListRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9090,7 +9802,7 @@ def test_create_recurring_audience_list_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/recurringAudienceLists/sample2"} request = request_type(**request_init) # Mock the http 
request call within the method and fake a BadRequest error. @@ -9102,10 +9814,10 @@ def test_create_recurring_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_recurring_audience_list(request) + client.get_recurring_audience_list(request) -def test_create_recurring_audience_list_rest_flattened(): +def test_get_recurring_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9117,14 +9829,11 @@ def test_create_recurring_audience_list_rest_flattened(): return_value = analytics_data_api.RecurringAudienceList() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"name": "properties/sample1/recurringAudienceLists/sample2"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - recurring_audience_list=analytics_data_api.RecurringAudienceList( - name="name_value" - ), + name="name_value", ) mock_args.update(sample_request) @@ -9137,20 +9846,20 @@ def test_create_recurring_audience_list_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_recurring_audience_list(**mock_args) + client.get_recurring_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/recurringAudienceLists" + "%s/v1alpha/{name=properties/*/recurringAudienceLists/*}" % client.transport._host, args[1], ) -def test_create_recurring_audience_list_rest_flattened_error(transport: str = "rest"): +def test_get_recurring_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9159,16 +9868,13 @@ def test_create_recurring_audience_list_rest_flattened_error(transport: str = "r # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_recurring_audience_list( - analytics_data_api.CreateRecurringAudienceListRequest(), - parent="parent_value", - recurring_audience_list=analytics_data_api.RecurringAudienceList( - name="name_value" - ), + client.get_recurring_audience_list( + analytics_data_api.GetRecurringAudienceListRequest(), + name="name_value", ) -def test_create_recurring_audience_list_rest_error(): +def test_get_recurring_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -9177,52 +9883,46 @@ def test_create_recurring_audience_list_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetRecurringAudienceListRequest, + analytics_data_api.ListRecurringAudienceListsRequest, dict, ], ) -def test_get_recurring_audience_list_rest(request_type): +def test_list_recurring_audience_lists_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/recurringAudienceLists/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock 
the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.RecurringAudienceList( - name="name_value", - audience="audience_value", - audience_display_name="audience_display_name_value", - active_days_remaining=2213, - audience_lists=["audience_lists_value"], + return_value = analytics_data_api.ListRecurringAudienceListsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.RecurringAudienceList.pb(return_value) + return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_recurring_audience_list(request) + response = client.list_recurring_audience_lists(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.RecurringAudienceList) - assert response.name == "name_value" - assert response.audience == "audience_value" - assert response.audience_display_name == "audience_display_name_value" - assert response.active_days_remaining == 2213 - assert response.audience_lists == ["audience_lists_value"] + assert isinstance(response, pagers.ListRecurringAudienceListsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_recurring_audience_list_rest_use_cached_wrapped_rpc(): +def test_list_recurring_audience_lists_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9237,7 +9937,7 @@ def test_get_recurring_audience_list_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_recurring_audience_list + client._transport.list_recurring_audience_lists in client._transport._wrapped_methods ) @@ -9247,29 +9947,29 @@ def test_get_recurring_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_recurring_audience_list + client._transport.list_recurring_audience_lists ] = mock_rpc request = {} - client.get_recurring_audience_list(request) + client.list_recurring_audience_lists(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_recurring_audience_list(request) + client.list_recurring_audience_lists(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_recurring_audience_list_rest_required_fields( - request_type=analytics_data_api.GetRecurringAudienceListRequest, +def test_list_recurring_audience_lists_rest_required_fields( + request_type=analytics_data_api.ListRecurringAudienceListsRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9280,21 +9980,28 @@ def test_get_recurring_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_recurring_audience_list._get_unset_required_fields(jsonified_request) + ).list_recurring_audience_lists._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_recurring_audience_list._get_unset_required_fields(jsonified_request) + ).list_recurring_audience_lists._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9303,7 +10010,7 @@ def test_get_recurring_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.RecurringAudienceList() + return_value = analytics_data_api.ListRecurringAudienceListsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9324,30 +10031,42 @@ def test_get_recurring_audience_list_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.RecurringAudienceList.pb(return_value) + return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_recurring_audience_list(request) + response = client.list_recurring_audience_lists(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_recurring_audience_list_rest_unset_required_fields(): +def test_list_recurring_audience_lists_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.get_recurring_audience_list._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_recurring_audience_lists._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_recurring_audience_list_rest_interceptors(null_interceptor): +def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9360,14 +10079,16 @@ def test_get_recurring_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_get_recurring_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, + "post_list_recurring_audience_lists", ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_get_recurring_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, + "pre_list_recurring_audience_lists", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.GetRecurringAudienceListRequest.pb( - analytics_data_api.GetRecurringAudienceListRequest() + pb_message = analytics_data_api.ListRecurringAudienceListsRequest.pb( + analytics_data_api.ListRecurringAudienceListsRequest() ) transcode.return_value = { "method": "post", @@ -9379,19 +10100,21 @@ def test_get_recurring_audience_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_data_api.RecurringAudienceList.to_json( - analytics_data_api.RecurringAudienceList() + req.return_value._content = ( + 
analytics_data_api.ListRecurringAudienceListsResponse.to_json( + analytics_data_api.ListRecurringAudienceListsResponse() + ) ) - request = analytics_data_api.GetRecurringAudienceListRequest() + request = analytics_data_api.ListRecurringAudienceListsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.RecurringAudienceList() + post.return_value = analytics_data_api.ListRecurringAudienceListsResponse() - client.get_recurring_audience_list( + client.list_recurring_audience_lists( request, metadata=[ ("key", "val"), @@ -9403,9 +10126,9 @@ def test_get_recurring_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_recurring_audience_list_rest_bad_request( +def test_list_recurring_audience_lists_rest_bad_request( transport: str = "rest", - request_type=analytics_data_api.GetRecurringAudienceListRequest, + request_type=analytics_data_api.ListRecurringAudienceListsRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9413,7 +10136,7 @@ def test_get_recurring_audience_list_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/recurringAudienceLists/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -9425,10 +10148,10 @@ def test_get_recurring_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_recurring_audience_list(request) + client.list_recurring_audience_lists(request) -def test_get_recurring_audience_list_rest_flattened(): +def test_list_recurring_audience_lists_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9437,14 +10160,14 @@ def test_get_recurring_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.RecurringAudienceList() + return_value = analytics_data_api.ListRecurringAudienceListsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/recurringAudienceLists/sample2"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -9452,25 +10175,27 @@ def test_get_recurring_audience_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.RecurringAudienceList.pb(return_value) + return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_recurring_audience_list(**mock_args) + client.list_recurring_audience_lists(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/recurringAudienceLists/*}" + "%s/v1alpha/{parent=properties/*}/recurringAudienceLists" % client.transport._host, args[1], ) -def test_get_recurring_audience_list_rest_flattened_error(transport: str = "rest"): +def test_list_recurring_audience_lists_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9479,61 +10204,119 @@ def test_get_recurring_audience_list_rest_flattened_error(transport: str = "rest # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_recurring_audience_list( - analytics_data_api.GetRecurringAudienceListRequest(), - name="name_value", + client.list_recurring_audience_lists( + analytics_data_api.ListRecurringAudienceListsRequest(), + parent="parent_value", ) -def test_get_recurring_audience_list_rest_error(): +def test_list_recurring_audience_lists_rest_pager(transport: str = "rest"): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + next_page_token="abc", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[], + next_page_token="def", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_data_api.ListRecurringAudienceListsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = client.list_recurring_audience_lists(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, analytics_data_api.RecurringAudienceList) for i in results + ) + + pages = list(client.list_recurring_audience_lists(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_data_api.ListRecurringAudienceListsRequest, + analytics_data_api.GetPropertyQuotasSnapshotRequest, dict, ], ) -def 
test_list_recurring_audience_lists_rest(request_type): +def test_get_property_quotas_snapshot_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/propertyQuotasSnapshot"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListRecurringAudienceListsResponse( - next_page_token="next_page_token_value", + return_value = analytics_data_api.PropertyQuotasSnapshot( + name="name_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( - return_value - ) + return_value = analytics_data_api.PropertyQuotasSnapshot.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_recurring_audience_lists(request) + response = client.get_property_quotas_snapshot(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRecurringAudienceListsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) + assert response.name == "name_value" -def test_list_recurring_audience_lists_rest_use_cached_wrapped_rpc(): +def test_get_property_quotas_snapshot_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9548,7 +10331,7 @@ def test_list_recurring_audience_lists_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_recurring_audience_lists + client._transport.get_property_quotas_snapshot in client._transport._wrapped_methods ) @@ -9558,29 +10341,29 @@ def test_list_recurring_audience_lists_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_recurring_audience_lists + client._transport.get_property_quotas_snapshot ] = mock_rpc request = {} - client.list_recurring_audience_lists(request) + client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_recurring_audience_lists(request) + client.get_property_quotas_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_recurring_audience_lists_rest_required_fields( - request_type=analytics_data_api.ListRecurringAudienceListsRequest, +def test_get_property_quotas_snapshot_rest_required_fields( + request_type=analytics_data_api.GetPropertyQuotasSnapshotRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9591,28 +10374,21 @@ def test_list_recurring_audience_lists_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_recurring_audience_lists._get_unset_required_fields(jsonified_request) + ).get_property_quotas_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_recurring_audience_lists._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).get_property_quotas_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9621,7 +10397,7 @@ def test_list_recurring_audience_lists_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListRecurringAudienceListsResponse() + return_value = analytics_data_api.PropertyQuotasSnapshot() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9642,42 +10418,30 @@ def test_list_recurring_audience_lists_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( - return_value - ) + return_value = analytics_data_api.PropertyQuotasSnapshot.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_recurring_audience_lists(request) + response = client.get_property_quotas_snapshot(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_recurring_audience_lists_rest_unset_required_fields(): +def test_get_property_quotas_snapshot_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_recurring_audience_lists._get_unset_required_fields( - {} - ) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_property_quotas_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): +def test_get_property_quotas_snapshot_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9691,15 +10455,14 @@ def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): path_template, "transcode" ) as transcode, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "post_list_recurring_audience_lists", + "post_get_property_quotas_snapshot", ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, - "pre_list_recurring_audience_lists", + transports.AlphaAnalyticsDataRestInterceptor, "pre_get_property_quotas_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.ListRecurringAudienceListsRequest.pb( - analytics_data_api.ListRecurringAudienceListsRequest() + pb_message = analytics_data_api.GetPropertyQuotasSnapshotRequest.pb( + analytics_data_api.GetPropertyQuotasSnapshotRequest() ) transcode.return_value = { "method": "post", @@ -9711,21 +10474,19 @@ def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_data_api.ListRecurringAudienceListsResponse.to_json( - analytics_data_api.ListRecurringAudienceListsResponse() - ) + req.return_value._content = 
analytics_data_api.PropertyQuotasSnapshot.to_json( + analytics_data_api.PropertyQuotasSnapshot() ) - request = analytics_data_api.ListRecurringAudienceListsRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.ListRecurringAudienceListsResponse() + post.return_value = analytics_data_api.PropertyQuotasSnapshot() - client.list_recurring_audience_lists( + client.get_property_quotas_snapshot( request, metadata=[ ("key", "val"), @@ -9737,9 +10498,9 @@ def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_recurring_audience_lists_rest_bad_request( +def test_get_property_quotas_snapshot_rest_bad_request( transport: str = "rest", - request_type=analytics_data_api.ListRecurringAudienceListsRequest, + request_type=analytics_data_api.GetPropertyQuotasSnapshotRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9747,7 +10508,7 @@ def test_list_recurring_audience_lists_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/propertyQuotasSnapshot"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -9759,10 +10520,10 @@ def test_list_recurring_audience_lists_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_recurring_audience_lists(request) + client.get_property_quotas_snapshot(request) -def test_list_recurring_audience_lists_rest_flattened(): +def test_get_property_quotas_snapshot_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9771,14 +10532,14 @@ def test_list_recurring_audience_lists_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListRecurringAudienceListsResponse() + return_value = analytics_data_api.PropertyQuotasSnapshot() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"name": "properties/sample1/propertyQuotasSnapshot"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -9786,27 +10547,25 @@ def test_list_recurring_audience_lists_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( - return_value - ) + return_value = analytics_data_api.PropertyQuotasSnapshot.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_recurring_audience_lists(**mock_args) + client.get_property_quotas_snapshot(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/recurringAudienceLists" + "%s/v1alpha/{name=properties/*/propertyQuotasSnapshot}" % client.transport._host, args[1], ) -def test_list_recurring_audience_lists_rest_flattened_error(transport: str = "rest"): +def test_get_property_quotas_snapshot_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9815,77 +10574,17 @@ def test_list_recurring_audience_lists_rest_flattened_error(transport: str = "re # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_recurring_audience_lists( - analytics_data_api.ListRecurringAudienceListsRequest(), - parent="parent_value", + client.get_property_quotas_snapshot( + analytics_data_api.GetPropertyQuotasSnapshotRequest(), + name="name_value", ) -def test_list_recurring_audience_lists_rest_pager(transport: str = "rest"): +def test_get_property_quotas_snapshot_rest_error(): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - ], - next_page_token="abc", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[], - next_page_token="def", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_data_api.ListRecurringAudienceListsResponse.to_json(x) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1"} - - pager = client.list_recurring_audience_lists(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, analytics_data_api.RecurringAudienceList) for i in results - ) - - pages = list(client.list_recurring_audience_lists(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", @@ -11498,6 +12197,7 @@ def test_alpha_analytics_data_base_transport(): "create_recurring_audience_list", "get_recurring_audience_list", 
"list_recurring_audience_lists", + "get_property_quotas_snapshot", "create_report_task", "query_report_task", "get_report_task", @@ -11835,6 +12535,9 @@ def test_alpha_analytics_data_client_transport_session_collision(transport_name) session1 = client1.transport.list_recurring_audience_lists._session session2 = client2.transport.list_recurring_audience_lists._session assert session1 != session2 + session1 = client1.transport.get_property_quotas_snapshot._session + session2 = client2.transport.get_property_quotas_snapshot._session + assert session1 != session2 session1 = client1.transport.create_report_task._session session2 = client2.transport.create_report_task._session assert session1 != session2 @@ -12032,9 +12735,29 @@ def test_parse_audience_list_path(): assert expected == actual -def test_recurring_audience_list_path(): +def test_property_quotas_snapshot_path(): property = "oyster" - recurring_audience_list = "nudibranch" + expected = "properties/{property}/propertyQuotasSnapshot".format( + property=property, + ) + actual = AlphaAnalyticsDataClient.property_quotas_snapshot_path(property) + assert expected == actual + + +def test_parse_property_quotas_snapshot_path(): + expected = { + "property": "nudibranch", + } + path = AlphaAnalyticsDataClient.property_quotas_snapshot_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlphaAnalyticsDataClient.parse_property_quotas_snapshot_path(path) + assert expected == actual + + +def test_recurring_audience_list_path(): + property = "cuttlefish" + recurring_audience_list = "mussel" expected = ( "properties/{property}/recurringAudienceLists/{recurring_audience_list}".format( property=property, @@ -12049,8 +12772,8 @@ def test_recurring_audience_list_path(): def test_parse_recurring_audience_list_path(): expected = { - "property": "cuttlefish", - "recurring_audience_list": "mussel", + "property": "winkle", + "recurring_audience_list": "nautilus", } path = AlphaAnalyticsDataClient.recurring_audience_list_path(**expected) @@ -12060,8 +12783,8 @@ def test_parse_recurring_audience_list_path(): def test_report_task_path(): - property = "winkle" - report_task = "nautilus" + property = "scallop" + report_task = "abalone" expected = "properties/{property}/reportTasks/{report_task}".format( property=property, report_task=report_task, @@ -12072,8 +12795,8 @@ def test_report_task_path(): def test_parse_report_task_path(): expected = { - "property": "scallop", - "report_task": "abalone", + "property": "squid", + "report_task": "clam", } path = AlphaAnalyticsDataClient.report_task_path(**expected) @@ -12083,7 +12806,7 @@ def test_parse_report_task_path(): def test_common_billing_account_path(): - billing_account = "squid" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -12093,7 +12816,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "clam", + "billing_account": "octopus", } path = AlphaAnalyticsDataClient.common_billing_account_path(**expected) @@ -12103,7 +12826,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "whelk" + folder = "oyster" expected = "folders/{folder}".format( folder=folder, ) @@ -12113,7 +12836,7 @@ def test_common_folder_path(): def 
test_parse_common_folder_path(): expected = { - "folder": "octopus", + "folder": "nudibranch", } path = AlphaAnalyticsDataClient.common_folder_path(**expected) @@ -12123,7 +12846,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "oyster" + organization = "cuttlefish" expected = "organizations/{organization}".format( organization=organization, ) @@ -12133,7 +12856,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nudibranch", + "organization": "mussel", } path = AlphaAnalyticsDataClient.common_organization_path(**expected) @@ -12143,7 +12866,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "cuttlefish" + project = "winkle" expected = "projects/{project}".format( project=project, ) @@ -12153,7 +12876,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "mussel", + "project": "nautilus", } path = AlphaAnalyticsDataClient.common_project_path(**expected) @@ -12163,8 +12886,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "winkle" - location = "nautilus" + project = "scallop" + location = "abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -12175,8 +12898,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "scallop", - "location": "abalone", + "project": "squid", + "location": "clam", } path = AlphaAnalyticsDataClient.common_location_path(**expected) From 2cc1550492a2b78ed7240aab84a8449de5e5afa2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 20 Sep 2024 11:37:31 -0400 Subject: [PATCH 28/59] feat: [google-maps-places] add `routing_parameters` to SearchNearbyRequest and SearchTextRequest (#13092) BEGIN_COMMIT_OVERRIDE feat: action for publishing data profiles to 
SecOps (formelly known as Chronicle) feat: action for publishing data profiles to Security Command Center feat: discovery configs for AWS S3 buckets docs: small improvements and clarifications END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. feat: add `search_along_route_parameters` to SearchTextRequest feat: add `routing_summaries` to SearchNearbyResponse and SearchTextResponse docs: A comment for field `contextual_contents` in message `.google.maps.places.v1.SearchTextResponse` is changed to be more assertive docs: A comment for field `open_now` in message `.google.maps.places.v1.Place` is changed to clarify what it means with new-since-previous-comment current and secondary opening hours fields Introduces search along route and trip time features. PiperOrigin-RevId: 675760040 Source-Link: https://github.com/googleapis/googleapis/commit/42219196a8f02e2e17198ad88f4369324dfe1f09 Source-Link: https://github.com/googleapis/googleapis-gen/commit/9caa94e6a392aed9fcca81cb8091ffe9cdfce033 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcHMtcGxhY2VzLy5Pd2xCb3QueWFtbCIsImgiOiI5Y2FhOTRlNmEzOTJhZWQ5ZmNjYTgxY2I4MDkxZmZlOWNkZmNlMDMzIn0= --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google/maps/places/__init__.py | 12 ++ .../google/maps/places_v1/__init__.py | 12 ++ .../places_v1/services/places/async_client.py | 1 + .../maps/places_v1/services/places/client.py | 1 + .../google/maps/places_v1/types/__init__.py | 12 ++ .../google/maps/places_v1/types/place.py | 10 +- .../maps/places_v1/types/places_service.py | 135 +++++++++++++++++- .../google/maps/places_v1/types/polyline.py | 61 ++++++++ .../maps/places_v1/types/route_modifiers.py | 75 ++++++++++ .../places_v1/types/routing_preference.py | 70 +++++++++ .../maps/places_v1/types/routing_summary.py | 81 +++++++++++ .../maps/places_v1/types/travel_mode.py | 62 ++++++++ .../scripts/fixup_places_v1_keywords.py | 4 +- .../tests/unit/gapic/places_v1/test_places.py | 5 + 14 files changed, 535 insertions(+), 6 
deletions(-) create mode 100644 packages/google-maps-places/google/maps/places_v1/types/polyline.py create mode 100644 packages/google-maps-places/google/maps/places_v1/types/route_modifiers.py create mode 100644 packages/google-maps-places/google/maps/places_v1/types/routing_preference.py create mode 100644 packages/google-maps-places/google/maps/places_v1/types/routing_summary.py create mode 100644 packages/google-maps-places/google/maps/places_v1/types/travel_mode.py diff --git a/packages/google-maps-places/google/maps/places/__init__.py b/packages/google-maps-places/google/maps/places/__init__.py index ba2545071c8b..28c346d421a8 100644 --- a/packages/google-maps-places/google/maps/places/__init__.py +++ b/packages/google-maps-places/google/maps/places/__init__.py @@ -34,13 +34,19 @@ GetPhotoMediaRequest, GetPlaceRequest, PhotoMedia, + RoutingParameters, SearchNearbyRequest, SearchNearbyResponse, SearchTextRequest, SearchTextResponse, ) +from google.maps.places_v1.types.polyline import Polyline from google.maps.places_v1.types.reference import References from google.maps.places_v1.types.review import Review +from google.maps.places_v1.types.route_modifiers import RouteModifiers +from google.maps.places_v1.types.routing_preference import RoutingPreference +from google.maps.places_v1.types.routing_summary import RoutingSummary +from google.maps.places_v1.types.travel_mode import TravelMode __all__ = ( "PlacesClient", @@ -60,10 +66,16 @@ "GetPhotoMediaRequest", "GetPlaceRequest", "PhotoMedia", + "RoutingParameters", "SearchNearbyRequest", "SearchNearbyResponse", "SearchTextRequest", "SearchTextResponse", + "Polyline", "References", "Review", + "RouteModifiers", + "RoutingPreference", + "RoutingSummary", + "TravelMode", ) diff --git a/packages/google-maps-places/google/maps/places_v1/__init__.py b/packages/google-maps-places/google/maps/places_v1/__init__.py index bb2075cda07a..106940dbdfc2 100644 --- a/packages/google-maps-places/google/maps/places_v1/__init__.py 
+++ b/packages/google-maps-places/google/maps/places_v1/__init__.py @@ -33,13 +33,19 @@ GetPhotoMediaRequest, GetPlaceRequest, PhotoMedia, + RoutingParameters, SearchNearbyRequest, SearchNearbyResponse, SearchTextRequest, SearchTextResponse, ) +from .types.polyline import Polyline from .types.reference import References from .types.review import Review +from .types.route_modifiers import RouteModifiers +from .types.routing_preference import RoutingPreference +from .types.routing_summary import RoutingSummary +from .types.travel_mode import TravelMode __all__ = ( "PlacesAsyncClient", @@ -58,11 +64,17 @@ "PhotoMedia", "Place", "PlacesClient", + "Polyline", "PriceLevel", "References", "Review", + "RouteModifiers", + "RoutingParameters", + "RoutingPreference", + "RoutingSummary", "SearchNearbyRequest", "SearchNearbyResponse", "SearchTextRequest", "SearchTextResponse", + "TravelMode", ) diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py b/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py index 0e27906f0dc2..46a0d6df67af 100644 --- a/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py +++ b/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py @@ -54,6 +54,7 @@ place, places_service, review, + routing_summary, ) from .client import PlacesClient diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/client.py b/packages/google-maps-places/google/maps/places_v1/services/places/client.py index bbf53396b61b..7db691a4a89b 100644 --- a/packages/google-maps-places/google/maps/places_v1/services/places/client.py +++ b/packages/google-maps-places/google/maps/places_v1/services/places/client.py @@ -60,6 +60,7 @@ place, places_service, review, + routing_summary, ) from .transports.base import DEFAULT_CLIENT_INFO, PlacesTransport diff --git a/packages/google-maps-places/google/maps/places_v1/types/__init__.py 
b/packages/google-maps-places/google/maps/places_v1/types/__init__.py index 99b294c1dc25..5b1baab78073 100644 --- a/packages/google-maps-places/google/maps/places_v1/types/__init__.py +++ b/packages/google-maps-places/google/maps/places_v1/types/__init__.py @@ -27,13 +27,19 @@ GetPhotoMediaRequest, GetPlaceRequest, PhotoMedia, + RoutingParameters, SearchNearbyRequest, SearchNearbyResponse, SearchTextRequest, SearchTextResponse, ) +from .polyline import Polyline from .reference import References from .review import Review +from .route_modifiers import RouteModifiers +from .routing_preference import RoutingPreference +from .routing_summary import RoutingSummary +from .travel_mode import TravelMode __all__ = ( "AuthorAttribution", @@ -51,10 +57,16 @@ "GetPhotoMediaRequest", "GetPlaceRequest", "PhotoMedia", + "RoutingParameters", "SearchNearbyRequest", "SearchNearbyResponse", "SearchTextRequest", "SearchTextResponse", + "Polyline", "References", "Review", + "RouteModifiers", + "RoutingPreference", + "RoutingSummary", + "TravelMode", ) diff --git a/packages/google-maps-places/google/maps/places_v1/types/place.py b/packages/google-maps-places/google/maps/places_v1/types/place.py index 99c9e8f693e9..0ce1df663c1b 100644 --- a/packages/google-maps-places/google/maps/places_v1/types/place.py +++ b/packages/google-maps-places/google/maps/places_v1/types/place.py @@ -439,9 +439,13 @@ class OpeningHours(proto.Message): Attributes: open_now (bool): - Is this place open right now? Always present - unless we lack time-of-day or timezone data for - these opening hours. + Whether the opening hours period is currently + active. For regular opening hours and current + opening hours, this field means whether the + place is open. For secondary opening hours and + current secondary opening hours, this field + means whether the secondary hours of this place + is active. This field is a member of `oneof`_ ``_open_now``. 
periods (MutableSequence[google.maps.places_v1.types.Place.OpeningHours.Period]): diff --git a/packages/google-maps-places/google/maps/places_v1/types/places_service.py b/packages/google-maps-places/google/maps/places_v1/types/places_service.py index c02ec339b860..476c2fbe06c8 100644 --- a/packages/google-maps-places/google/maps/places_v1/types/places_service.py +++ b/packages/google-maps-places/google/maps/places_v1/types/places_service.py @@ -22,11 +22,17 @@ import proto # type: ignore from google.maps.places_v1.types import contextual_content, ev_charging, geometry +from google.maps.places_v1.types import routing_preference as gmp_routing_preference from google.maps.places_v1.types import place as gmp_place +from google.maps.places_v1.types import polyline as gmp_polyline +from google.maps.places_v1.types import route_modifiers as gmp_route_modifiers +from google.maps.places_v1.types import routing_summary +from google.maps.places_v1.types import travel_mode as gmp_travel_mode __protobuf__ = proto.module( package="google.maps.places.v1", manifest={ + "RoutingParameters", "SearchNearbyRequest", "SearchNearbyResponse", "SearchTextRequest", @@ -40,6 +46,50 @@ ) +class RoutingParameters(proto.Message): + r"""Parameters to configure the routing calculations to the + places in the response, both along a route (where result ranking + will be influenced) and for calculating travel times on results. + + Attributes: + origin (google.type.latlng_pb2.LatLng): + Optional. An explicit routing origin that + overrides the origin defined in the polyline. By + default, the polyline origin is used. + travel_mode (google.maps.places_v1.types.TravelMode): + Optional. The travel mode. + route_modifiers (google.maps.places_v1.types.RouteModifiers): + Optional. The route modifiers. + routing_preference (google.maps.places_v1.types.RoutingPreference): + Optional. Specifies how to compute the routing summaries. 
+ The server attempts to use the selected routing preference + to compute the route. The traffic aware routing preference + is only available for the ``DRIVE`` or ``TWO_WHEELER`` + ``travelMode``. + """ + + origin: latlng_pb2.LatLng = proto.Field( + proto.MESSAGE, + number=1, + message=latlng_pb2.LatLng, + ) + travel_mode: gmp_travel_mode.TravelMode = proto.Field( + proto.ENUM, + number=2, + enum=gmp_travel_mode.TravelMode, + ) + route_modifiers: gmp_route_modifiers.RouteModifiers = proto.Field( + proto.MESSAGE, + number=3, + message=gmp_route_modifiers.RouteModifiers, + ) + routing_preference: gmp_routing_preference.RoutingPreference = proto.Field( + proto.ENUM, + number=4, + enum=gmp_routing_preference.RoutingPreference, + ) + + class SearchNearbyRequest(proto.Message): r"""Request proto for Search Nearby. @@ -163,6 +213,9 @@ class SearchNearbyRequest(proto.Message): Required. The region to search. rank_preference (google.maps.places_v1.types.SearchNearbyRequest.RankPreference): How results will be ranked in the response. + routing_parameters (google.maps.places_v1.types.RoutingParameters): + Optional. Parameters that affect the routing + to the search results. """ class RankPreference(proto.Enum): @@ -238,6 +291,11 @@ class LocationRestriction(proto.Message): number=9, enum=RankPreference, ) + routing_parameters: "RoutingParameters" = proto.Field( + proto.MESSAGE, + number=10, + message="RoutingParameters", + ) class SearchNearbyResponse(proto.Message): @@ -248,6 +306,14 @@ class SearchNearbyResponse(proto.Message): A list of places that meets user's requirements like places types, number of places and specific location restriction. + routing_summaries (MutableSequence[google.maps.places_v1.types.RoutingSummary]): + A list of routing summaries where each entry + associates to the corresponding place in the + same index in the places field. If the routing + summary is not available for one of the places, + it will contain an empty entry. 
This list should + have as many entries as the list of places if + requested. """ places: MutableSequence[gmp_place.Place] = proto.RepeatedField( @@ -255,6 +321,13 @@ class SearchNearbyResponse(proto.Message): number=1, message=gmp_place.Place, ) + routing_summaries: MutableSequence[ + routing_summary.RoutingSummary + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=routing_summary.RoutingSummary, + ) class SearchTextRequest(proto.Message): @@ -327,6 +400,12 @@ class SearchTextRequest(proto.Message): ev_options (google.maps.places_v1.types.SearchTextRequest.EVOptions): Optional. Set the searchable EV options of a place search request. + routing_parameters (google.maps.places_v1.types.RoutingParameters): + Optional. Additional parameters for routing + to results. + search_along_route_parameters (google.maps.places_v1.types.SearchTextRequest.SearchAlongRouteParameters): + Optional. Additional parameters proto for + searching along a route. """ class RankPreference(proto.Enum): @@ -445,6 +524,35 @@ class EVOptions(proto.Message): enum=ev_charging.EVConnectorType, ) + class SearchAlongRouteParameters(proto.Message): + r"""Specifies a precalculated polyline from the `Routes + API `__ + defining the route to search. Searching along a route is similar to + using the ``locationBias`` or ``locationRestriction`` request option + to bias the search results. However, while the ``locationBias`` and + ``locationRestriction`` options let you specify a region to bias the + search results, this option lets you bias the results along a trip + route. + + Results are not guaranteed to be along the route provided, but + rather are ranked within the search area defined by the polyline + and, optionally, by the ``locationBias`` or ``locationRestriction`` + based on minimal detour times from origin to destination. The + results might be along an alternate route, especially if the + provided polyline does not define an optimal route from origin to + destination. 
+ + Attributes: + polyline (google.maps.places_v1.types.Polyline): + Required. The route polyline. + """ + + polyline: gmp_polyline.Polyline = proto.Field( + proto.MESSAGE, + number=1, + message=gmp_polyline.Polyline, + ) + text_query: str = proto.Field( proto.STRING, number=1, @@ -502,6 +610,16 @@ class EVOptions(proto.Message): number=15, message=EVOptions, ) + routing_parameters: "RoutingParameters" = proto.Field( + proto.MESSAGE, + number=16, + message="RoutingParameters", + ) + search_along_route_parameters: SearchAlongRouteParameters = proto.Field( + proto.MESSAGE, + number=17, + message=SearchAlongRouteParameters, + ) class SearchTextResponse(proto.Message): @@ -511,6 +629,14 @@ class SearchTextResponse(proto.Message): places (MutableSequence[google.maps.places_v1.types.Place]): A list of places that meet the user's text search criteria. + routing_summaries (MutableSequence[google.maps.places_v1.types.RoutingSummary]): + A list of routing summaries where each entry + associates to the corresponding place in the + same index in the places field. If the routing + summary is not available for one of the places, + it will contain an empty entry. This list will + have as many entries as the list of places if + requested. contextual_contents (MutableSequence[google.maps.places_v1.types.ContextualContent]): Experimental: See https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative @@ -522,7 +648,7 @@ class SearchTextResponse(proto.Message): in the request are preferred. If the contextual content is not available for one of the places, it will return non-contextual content. It will be empty only when the - content is unavailable for this place. This list should have + content is unavailable for this place. This list will have as many entries as the list of places if requested. 
""" @@ -531,6 +657,13 @@ class SearchTextResponse(proto.Message): number=1, message=gmp_place.Place, ) + routing_summaries: MutableSequence[ + routing_summary.RoutingSummary + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=routing_summary.RoutingSummary, + ) contextual_contents: MutableSequence[ contextual_content.ContextualContent ] = proto.RepeatedField( diff --git a/packages/google-maps-places/google/maps/places_v1/types/polyline.py b/packages/google-maps-places/google/maps/places_v1/types/polyline.py new file mode 100644 index 000000000000..c5f14fdb5642 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/types/polyline.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.maps.places.v1", + manifest={ + "Polyline", + }, +) + + +class Polyline(proto.Message): + r"""A route polyline. Only supports an `encoded + polyline `__, + which can be passed as a string and includes compression with + minimal lossiness. This is the Routes API default output. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + encoded_polyline (str): + An `encoded + polyline `__, + as returned by the `Routes API by + default `__. 
+ See the + `encoder `__ + and + `decoder `__ + tools. + + This field is a member of `oneof`_ ``polyline_type``. + """ + + encoded_polyline: str = proto.Field( + proto.STRING, + number=1, + oneof="polyline_type", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-places/google/maps/places_v1/types/route_modifiers.py b/packages/google-maps-places/google/maps/places_v1/types/route_modifiers.py new file mode 100644 index 000000000000..59e720aa7696 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/types/route_modifiers.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.maps.places.v1", + manifest={ + "RouteModifiers", + }, +) + + +class RouteModifiers(proto.Message): + r"""Encapsulates a set of optional conditions to satisfy when + calculating the routes. + + Attributes: + avoid_tolls (bool): + Optional. When set to true, avoids toll roads where + reasonable, giving preference to routes not containing toll + roads. Applies only to the ``DRIVE`` and ``TWO_WHEELER`` + [``TravelMode``][google.maps.places.v1.TravelMode]. + avoid_highways (bool): + Optional. When set to true, avoids highways where + reasonable, giving preference to routes not containing + highways. 
Applies only to the ``DRIVE`` and ``TWO_WHEELER`` + [``TravelMode``][google.maps.places.v1.TravelMode]. + avoid_ferries (bool): + Optional. When set to true, avoids ferries where reasonable, + giving preference to routes not containing ferries. Applies + only to the ``DRIVE`` and ``TWO_WHEELER`` + [``TravelMode``][google.maps.places.v1.TravelMode]. + avoid_indoor (bool): + Optional. When set to true, avoids navigating indoors where + reasonable, giving preference to routes not containing + indoor navigation. Applies only to the ``WALK`` + [``TravelMode``][google.maps.places.v1.TravelMode]. + """ + + avoid_tolls: bool = proto.Field( + proto.BOOL, + number=1, + ) + avoid_highways: bool = proto.Field( + proto.BOOL, + number=2, + ) + avoid_ferries: bool = proto.Field( + proto.BOOL, + number=3, + ) + avoid_indoor: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-places/google/maps/places_v1/types/routing_preference.py b/packages/google-maps-places/google/maps/places_v1/types/routing_preference.py new file mode 100644 index 000000000000..786d01140a2f --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/types/routing_preference.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.maps.places.v1", + manifest={ + "RoutingPreference", + }, +) + + +class RoutingPreference(proto.Enum): + r"""A set of values that specify factors to take into + consideration when calculating the route. + + Values: + ROUTING_PREFERENCE_UNSPECIFIED (0): + No routing preference specified. Default to + ``TRAFFIC_UNAWARE``. + TRAFFIC_UNAWARE (1): + Computes routes without taking live traffic conditions into + consideration. Suitable when traffic conditions don't matter + or are not applicable. Using this value produces the lowest + latency. Note: For + [``TravelMode``][google.maps.places.v1.TravelMode] ``DRIVE`` + and ``TWO_WHEELER``, the route and duration chosen are based + on road network and average time-independent traffic + conditions, not current road conditions. Consequently, + routes may include roads that are temporarily closed. + Results for a given request may vary over time due to + changes in the road network, updated average traffic + conditions, and the distributed nature of the service. + Results may also vary between nearly-equivalent routes at + any time or frequency. + TRAFFIC_AWARE (2): + Calculates routes taking live traffic conditions into + consideration. In contrast to ``TRAFFIC_AWARE_OPTIMAL``, + some optimizations are applied to significantly reduce + latency. + TRAFFIC_AWARE_OPTIMAL (3): + Calculates the routes taking live traffic + conditions into consideration, without applying + most performance optimizations. Using this value + produces the highest latency. 
+ """ + ROUTING_PREFERENCE_UNSPECIFIED = 0 + TRAFFIC_UNAWARE = 1 + TRAFFIC_AWARE = 2 + TRAFFIC_AWARE_OPTIMAL = 3 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-places/google/maps/places_v1/types/routing_summary.py b/packages/google-maps-places/google/maps/places_v1/types/routing_summary.py new file mode 100644 index 000000000000..7575fe18ed90 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/types/routing_summary.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.maps.places.v1", + manifest={ + "RoutingSummary", + }, +) + + +class RoutingSummary(proto.Message): + r"""The duration and distance from the routing origin to a place in the + response, and a second leg from that place to the destination, if + requested. Note: Adding ``routingSummaries`` in the field mask + without also including either the ``routingParameters.origin`` + parameter or the + ``searchAlongRouteParameters.polyline.encodedPolyline`` parameter in + the request causes an error. + + Attributes: + legs (MutableSequence[google.maps.places_v1.types.RoutingSummary.Leg]): + The legs of the trip. 
+ + When you calculate travel duration and distance from a set + origin, ``legs`` contains a single leg containing the + duration and distance from the origin to the destination. + When you do a search along route, ``legs`` contains two + legs: one from the origin to place, and one from the place + to the destination. + """ + + class Leg(proto.Message): + r"""A leg is a single portion of a journey from one location to + another. + + Attributes: + duration (google.protobuf.duration_pb2.Duration): + The time it takes to complete this leg of the + trip. + distance_meters (int): + The distance of this leg of the trip. + """ + + duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + distance_meters: int = proto.Field( + proto.INT32, + number=2, + ) + + legs: MutableSequence[Leg] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Leg, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-places/google/maps/places_v1/types/travel_mode.py b/packages/google-maps-places/google/maps/places_v1/types/travel_mode.py new file mode 100644 index 000000000000..bcb42ef66089 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/types/travel_mode.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.maps.places.v1", + manifest={ + "TravelMode", + }, +) + + +class TravelMode(proto.Enum): + r"""Travel mode options. These options map to what `Routes API + offers `__. + + Values: + TRAVEL_MODE_UNSPECIFIED (0): + No travel mode specified. Defaults to ``DRIVE``. + DRIVE (1): + Travel by passenger car. + BICYCLE (2): + Travel by bicycle. Not supported with + ``search_along_route_parameters``. + WALK (3): + Travel by walking. Not supported with + ``search_along_route_parameters``. + TWO_WHEELER (4): + Motorized two wheeled vehicles of all kinds such as scooters + and motorcycles. Note that this is distinct from the + ``BICYCLE`` travel mode which covers human-powered + transport. Not supported with + ``search_along_route_parameters``. Only supported in those + countries listed at `Countries and regions supported for + two-wheeled + vehicles `__. 
+ """ + TRAVEL_MODE_UNSPECIFIED = 0 + DRIVE = 1 + BICYCLE = 2 + WALK = 3 + TWO_WHEELER = 4 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-places/scripts/fixup_places_v1_keywords.py b/packages/google-maps-places/scripts/fixup_places_v1_keywords.py index 057be614cc45..886ede01a438 100644 --- a/packages/google-maps-places/scripts/fixup_places_v1_keywords.py +++ b/packages/google-maps-places/scripts/fixup_places_v1_keywords.py @@ -42,8 +42,8 @@ class placesCallTransformer(cst.CSTTransformer): 'autocomplete_places': ('input', 'location_bias', 'location_restriction', 'included_primary_types', 'included_region_codes', 'language_code', 'region_code', 'origin', 'input_offset', 'include_query_predictions', 'session_token', ), 'get_photo_media': ('name', 'max_width_px', 'max_height_px', 'skip_http_redirect', ), 'get_place': ('name', 'language_code', 'region_code', 'session_token', ), - 'search_nearby': ('location_restriction', 'language_code', 'region_code', 'included_types', 'excluded_types', 'included_primary_types', 'excluded_primary_types', 'max_result_count', 'rank_preference', ), - 'search_text': ('text_query', 'language_code', 'region_code', 'rank_preference', 'included_type', 'open_now', 'min_rating', 'max_result_count', 'price_levels', 'strict_type_filtering', 'location_bias', 'location_restriction', 'ev_options', ), + 'search_nearby': ('location_restriction', 'language_code', 'region_code', 'included_types', 'excluded_types', 'included_primary_types', 'excluded_primary_types', 'max_result_count', 'rank_preference', 'routing_parameters', ), + 'search_text': ('text_query', 'language_code', 'region_code', 'rank_preference', 'included_type', 'open_now', 'min_rating', 'max_result_count', 'price_levels', 'strict_type_filtering', 'location_bias', 'location_restriction', 'ev_options', 'routing_parameters', 'search_along_route_parameters', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git 
a/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py b/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py index 0dbb9efbb41f..c7fa3af8519d 100644 --- a/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py +++ b/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py @@ -59,7 +59,12 @@ photo, place, places_service, + polyline, review, + route_modifiers, + routing_preference, + routing_summary, + travel_mode, ) From afcf7cbe57d6e0f183a113ba03bba9c288052969 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 20 Sep 2024 11:41:04 -0400 Subject: [PATCH 29/59] feat: [google-cloud-dlp] action for publishing data profiles to SecOps (formerly known as Chronicle) (#13094) BEGIN_COMMIT_OVERRIDE feat: action for publishing data profiles to SecOps (formerly known as Chronicle) feat: action for publishing data profiles to Security Command Center feat: discovery configs for AWS S3 buckets docs: small improvements and clarifications END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
feat: action for publishing data profiles to Security Command Center feat: discovery configs for AWS S3 buckets docs: small improvements and clarifications PiperOrigin-RevId: 676184918 Source-Link: https://github.com/googleapis/googleapis/commit/607c2ae82620153880a66898bcbf0a46d38d7d10 Source-Link: https://github.com/googleapis/googleapis-gen/commit/f92f1ae19895930aca6ad11aaeb1d4d9cd7a61a1 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRscC8uT3dsQm90LnlhbWwiLCJoIjoiZjkyZjFhZTE5ODk1OTMwYWNhNmFkMTFhYWViMWQ0ZDljZDdhNjFhMSJ9 --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google/cloud/dlp/__init__.py | 28 + .../google/cloud/dlp/gapic_version.py | 2 +- .../google/cloud/dlp_v2/__init__.py | 28 + .../google/cloud/dlp_v2/gapic_version.py | 2 +- .../services/dlp_service/async_client.py | 1 + .../dlp_v2/services/dlp_service/client.py | 1 + .../services/dlp_service/transports/rest.py | 1 + .../google/cloud/dlp_v2/types/__init__.py | 28 + .../google/cloud/dlp_v2/types/dlp.py | 563 +++++++++++++++++- ...nippet_metadata_google.privacy.dlp.v2.json | 2 +- 10 files changed, 648 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-dlp/google/cloud/dlp/__init__.py b/packages/google-cloud-dlp/google/cloud/dlp/__init__.py index 4a4594889a9c..b77f85d4bdc6 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp/__init__.py +++ b/packages/google-cloud-dlp/google/cloud/dlp/__init__.py @@ -26,7 +26,12 @@ ActivateJobTriggerRequest, AllOtherDatabaseResources, AllOtherResources, + AmazonS3Bucket, + AmazonS3BucketConditions, + AmazonS3BucketRegex, AnalyzeDataSourceRiskDetails, + AwsAccount, + AwsAccountRegex, BigQueryDiscoveryTarget, BigQueryRegex, BigQueryRegexes, @@ -111,6 +116,9 @@ DiscoveryFileStoreConditions, DiscoveryGenerationCadence, DiscoveryInspectTemplateModifiedCadence, + DiscoveryOtherCloudConditions, + DiscoveryOtherCloudFilter, + DiscoveryOtherCloudGenerationCadence, DiscoverySchemaModifiedCadence, DiscoveryStartingLocation, 
DiscoveryTableModifiedCadence, @@ -203,6 +211,12 @@ MetadataLocation, MetadataType, NullPercentageLevel, + OtherCloudDiscoveryStartingLocation, + OtherCloudDiscoveryTarget, + OtherCloudResourceCollection, + OtherCloudResourceRegex, + OtherCloudResourceRegexes, + OtherCloudSingleResourceReference, OtherInfoTypeSummary, OutputStorageConfig, PrimitiveTransformation, @@ -307,7 +321,12 @@ "ActivateJobTriggerRequest", "AllOtherDatabaseResources", "AllOtherResources", + "AmazonS3Bucket", + "AmazonS3BucketConditions", + "AmazonS3BucketRegex", "AnalyzeDataSourceRiskDetails", + "AwsAccount", + "AwsAccountRegex", "BigQueryDiscoveryTarget", "BigQueryRegex", "BigQueryRegexes", @@ -385,6 +404,9 @@ "DiscoveryFileStoreConditions", "DiscoveryGenerationCadence", "DiscoveryInspectTemplateModifiedCadence", + "DiscoveryOtherCloudConditions", + "DiscoveryOtherCloudFilter", + "DiscoveryOtherCloudGenerationCadence", "DiscoverySchemaModifiedCadence", "DiscoveryStartingLocation", "DiscoveryTableModifiedCadence", @@ -471,6 +493,12 @@ "Location", "Manual", "MetadataLocation", + "OtherCloudDiscoveryStartingLocation", + "OtherCloudDiscoveryTarget", + "OtherCloudResourceCollection", + "OtherCloudResourceRegex", + "OtherCloudResourceRegexes", + "OtherCloudSingleResourceReference", "OtherInfoTypeSummary", "OutputStorageConfig", "PrimitiveTransformation", diff --git a/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py b/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py index 3ed830e26f63..558c8aab67c5 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py +++ b/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.22.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/__init__.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/__init__.py index 80b0be2c935b..4d47e1ac68f6 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/__init__.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/__init__.py @@ -25,7 +25,12 @@ ActivateJobTriggerRequest, AllOtherDatabaseResources, AllOtherResources, + AmazonS3Bucket, + AmazonS3BucketConditions, + AmazonS3BucketRegex, AnalyzeDataSourceRiskDetails, + AwsAccount, + AwsAccountRegex, BigQueryDiscoveryTarget, BigQueryRegex, BigQueryRegexes, @@ -110,6 +115,9 @@ DiscoveryFileStoreConditions, DiscoveryGenerationCadence, DiscoveryInspectTemplateModifiedCadence, + DiscoveryOtherCloudConditions, + DiscoveryOtherCloudFilter, + DiscoveryOtherCloudGenerationCadence, DiscoverySchemaModifiedCadence, DiscoveryStartingLocation, DiscoveryTableModifiedCadence, @@ -202,6 +210,12 @@ MetadataLocation, MetadataType, NullPercentageLevel, + OtherCloudDiscoveryStartingLocation, + OtherCloudDiscoveryTarget, + OtherCloudResourceCollection, + OtherCloudResourceRegex, + OtherCloudResourceRegexes, + OtherCloudSingleResourceReference, OtherInfoTypeSummary, OutputStorageConfig, PrimitiveTransformation, @@ -305,7 +319,12 @@ "ActivateJobTriggerRequest", "AllOtherDatabaseResources", "AllOtherResources", + "AmazonS3Bucket", + "AmazonS3BucketConditions", + "AmazonS3BucketRegex", "AnalyzeDataSourceRiskDetails", + "AwsAccount", + "AwsAccountRegex", "BigQueryDiscoveryTarget", "BigQueryField", "BigQueryKey", @@ -401,6 +420,9 @@ "DiscoveryFileStoreConditions", "DiscoveryGenerationCadence", "DiscoveryInspectTemplateModifiedCadence", + "DiscoveryOtherCloudConditions", + "DiscoveryOtherCloudFilter", + "DiscoveryOtherCloudGenerationCadence", "DiscoverySchemaModifiedCadence", "DiscoveryStartingLocation", "DiscoveryTableModifiedCadence", @@ -502,6 +524,12 @@ 
"MetadataLocation", "MetadataType", "NullPercentageLevel", + "OtherCloudDiscoveryStartingLocation", + "OtherCloudDiscoveryTarget", + "OtherCloudResourceCollection", + "OtherCloudResourceRegex", + "OtherCloudResourceRegexes", + "OtherCloudSingleResourceReference", "OtherInfoTypeSummary", "OutputStorageConfig", "PartitionId", diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py index 3ed830e26f63..558c8aab67c5 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.22.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/async_client.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/async_client.py index af7fdb0a0cee..2f384158ce0c 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/async_client.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/async_client.py @@ -5352,6 +5352,7 @@ async def sample_get_file_store_data_profile(): The profile for a file store. - Cloud Storage: maps 1:1 with a bucket. + - Amazon S3: maps 1:1 with a bucket. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py index 198f49f1e8e8..7163280e5c62 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py @@ -5877,6 +5877,7 @@ def sample_get_file_store_data_profile(): The profile for a file store. 
- Cloud Storage: maps 1:1 with a bucket. + - Amazon S3: maps 1:1 with a bucket. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/rest.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/rest.py index ed604aeb0b85..df56876a3b22 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/rest.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/rest.py @@ -4046,6 +4046,7 @@ def __call__( The profile for a file store. - Cloud Storage: maps 1:1 with a bucket. + - Amazon S3: maps 1:1 with a bucket. """ diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/__init__.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/__init__.py index b660eb26c59b..aef58e903c8e 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/__init__.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/__init__.py @@ -19,7 +19,12 @@ ActivateJobTriggerRequest, AllOtherDatabaseResources, AllOtherResources, + AmazonS3Bucket, + AmazonS3BucketConditions, + AmazonS3BucketRegex, AnalyzeDataSourceRiskDetails, + AwsAccount, + AwsAccountRegex, BigQueryDiscoveryTarget, BigQueryRegex, BigQueryRegexes, @@ -104,6 +109,9 @@ DiscoveryFileStoreConditions, DiscoveryGenerationCadence, DiscoveryInspectTemplateModifiedCadence, + DiscoveryOtherCloudConditions, + DiscoveryOtherCloudFilter, + DiscoveryOtherCloudGenerationCadence, DiscoverySchemaModifiedCadence, DiscoveryStartingLocation, DiscoveryTableModifiedCadence, @@ -196,6 +204,12 @@ MetadataLocation, MetadataType, NullPercentageLevel, + OtherCloudDiscoveryStartingLocation, + OtherCloudDiscoveryTarget, + OtherCloudResourceCollection, + OtherCloudResourceRegex, + OtherCloudResourceRegexes, + OtherCloudSingleResourceReference, OtherInfoTypeSummary, OutputStorageConfig, PrimitiveTransformation, @@ -298,7 +312,12 @@ "ActivateJobTriggerRequest", 
"AllOtherDatabaseResources", "AllOtherResources", + "AmazonS3Bucket", + "AmazonS3BucketConditions", + "AmazonS3BucketRegex", "AnalyzeDataSourceRiskDetails", + "AwsAccount", + "AwsAccountRegex", "BigQueryDiscoveryTarget", "BigQueryRegex", "BigQueryRegexes", @@ -376,6 +395,9 @@ "DiscoveryFileStoreConditions", "DiscoveryGenerationCadence", "DiscoveryInspectTemplateModifiedCadence", + "DiscoveryOtherCloudConditions", + "DiscoveryOtherCloudFilter", + "DiscoveryOtherCloudGenerationCadence", "DiscoverySchemaModifiedCadence", "DiscoveryStartingLocation", "DiscoveryTableModifiedCadence", @@ -462,6 +484,12 @@ "Location", "Manual", "MetadataLocation", + "OtherCloudDiscoveryStartingLocation", + "OtherCloudDiscoveryTarget", + "OtherCloudResourceCollection", + "OtherCloudResourceRegex", + "OtherCloudResourceRegexes", + "OtherCloudSingleResourceReference", "OtherInfoTypeSummary", "OutputStorageConfig", "PrimitiveTransformation", diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py index a92c0cfd6de3..e5d778fcae6d 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py @@ -206,7 +206,21 @@ "DiscoveryCloudStorageGenerationCadence", "DiscoveryCloudStorageConditions", "DiscoveryFileStoreConditions", + "OtherCloudDiscoveryTarget", + "DiscoveryOtherCloudFilter", + "OtherCloudResourceCollection", + "OtherCloudResourceRegexes", + "OtherCloudResourceRegex", + "AwsAccountRegex", + "AmazonS3BucketRegex", + "OtherCloudSingleResourceReference", + "AwsAccount", + "AmazonS3Bucket", + "DiscoveryOtherCloudConditions", + "AmazonS3BucketConditions", + "DiscoveryOtherCloudGenerationCadence", "DiscoveryStartingLocation", + "OtherCloudDiscoveryStartingLocation", "AllOtherResources", "DlpJob", "GetDlpJobRequest", @@ -7827,6 +7841,18 @@ class DataProfileAction(proto.Message): pub_sub_notification 
(google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification): Publish a message into the Pub/Sub topic. + This field is a member of `oneof`_ ``action``. + publish_to_chronicle (google.cloud.dlp_v2.types.DataProfileAction.PublishToChronicle): + Publishes generated data profiles to Google Security + Operations. For more information, see `Use Sensitive Data + Protection data in context-aware + analytics `__. + + This field is a member of `oneof`_ ``action``. + publish_to_scc (google.cloud.dlp_v2.types.DataProfileAction.PublishToSecurityCommandCenter): + Publishes findings to SCC for each data + profile. + This field is a member of `oneof`_ ``action``. tag_resources (google.cloud.dlp_v2.types.DataProfileAction.TagResources): Tags the profiled resources with the @@ -7947,6 +7973,18 @@ class DetailLevel(proto.Enum): ) ) + class PublishToChronicle(proto.Message): + r"""Message expressing intention to publish to Google Security + Operations. + + """ + + class PublishToSecurityCommandCenter(proto.Message): + r"""If set, a summary finding will be created/updated in SCC for + each profile. + + """ + class TagResources(proto.Message): r"""If set, attaches the [tags] (https://cloud.google.com/resource-manager/docs/tags/tags-overview) @@ -8062,6 +8100,18 @@ class TagValue(proto.Message): oneof="action", message=PubSubNotification, ) + publish_to_chronicle: PublishToChronicle = proto.Field( + proto.MESSAGE, + number=3, + oneof="action", + message=PublishToChronicle, + ) + publish_to_scc: PublishToSecurityCommandCenter = proto.Field( + proto.MESSAGE, + number=4, + oneof="action", + message=PublishToSecurityCommandCenter, + ) tag_resources: TagResources = proto.Field( proto.MESSAGE, number=8, @@ -8087,6 +8137,8 @@ class DataProfileJobConfig(proto.Message): service account that exists within this project must have access to all resources that are profiled, and the Cloud DLP API must be enabled. 
+ other_cloud_starting_location (google.cloud.dlp_v2.types.OtherCloudDiscoveryStartingLocation): + Must be set only when scanning other clouds. inspect_templates (MutableSequence[str]): Detection logic for profile generation. @@ -8119,6 +8171,11 @@ class DataProfileJobConfig(proto.Message): proto.STRING, number=5, ) + other_cloud_starting_location: "OtherCloudDiscoveryStartingLocation" = proto.Field( + proto.MESSAGE, + number=8, + message="OtherCloudDiscoveryStartingLocation", + ) inspect_templates: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=7, @@ -8251,6 +8308,8 @@ class DiscoveryConfig(proto.Message): Display name (max 100 chars) org_config (google.cloud.dlp_v2.types.DiscoveryConfig.OrgConfig): Only set when the parent is an org. + other_cloud_starting_location (google.cloud.dlp_v2.types.OtherCloudDiscoveryStartingLocation): + Must be set only when scanning other clouds. inspect_templates (MutableSequence[str]): Detection logic for profile generation. @@ -8348,6 +8407,11 @@ class OrgConfig(proto.Message): number=2, message=OrgConfig, ) + other_cloud_starting_location: "OtherCloudDiscoveryStartingLocation" = proto.Field( + proto.MESSAGE, + number=12, + message="OtherCloudDiscoveryStartingLocation", + ) inspect_templates: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, @@ -8422,6 +8486,12 @@ class DiscoveryTarget(proto.Message): Cloud Storage target for Discovery. The first target to match a table will be the one applied. + This field is a member of `oneof`_ ``target``. + other_cloud_target (google.cloud.dlp_v2.types.OtherCloudDiscoveryTarget): + Other clouds target for discovery. The first + target to match a resource will be the one + applied. + This field is a member of `oneof`_ ``target``. 
""" @@ -8449,6 +8519,12 @@ class DiscoveryTarget(proto.Message): oneof="target", message="CloudStorageDiscoveryTarget", ) + other_cloud_target: "OtherCloudDiscoveryTarget" = proto.Field( + proto.MESSAGE, + number=5, + oneof="target", + message="OtherCloudDiscoveryTarget", + ) class BigQueryDiscoveryTarget(proto.Message): @@ -9592,6 +9668,414 @@ class DiscoveryFileStoreConditions(proto.Message): ) +class OtherCloudDiscoveryTarget(proto.Message): + r"""Target used to match against for discovery of resources from other + clouds. An `AWS connector in Security Command Center + (Enterprise `__ + is required to use this feature. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + data_source_type (google.cloud.dlp_v2.types.DataSourceType): + Required. The type of data profiles generated by this + discovery target. Supported values are: + + - aws/s3/bucket + filter (google.cloud.dlp_v2.types.DiscoveryOtherCloudFilter): + Required. The resources that the discovery + cadence applies to. The first target with a + matching filter will be the one to apply to a + resource. + conditions (google.cloud.dlp_v2.types.DiscoveryOtherCloudConditions): + Optional. In addition to matching the filter, + these conditions must be true before a profile + is generated. + generation_cadence (google.cloud.dlp_v2.types.DiscoveryOtherCloudGenerationCadence): + How often and when to update data profiles. + New resources that match both the filter and + conditions are scanned as quickly as possible + depending on system capacity. + + This field is a member of `oneof`_ ``cadence``. + disabled (google.cloud.dlp_v2.types.Disabled): + Disable profiling for resources that match + this filter. 
+ + This field is a member of `oneof`_ ``cadence``. + """ + + data_source_type: "DataSourceType" = proto.Field( + proto.MESSAGE, + number=1, + message="DataSourceType", + ) + filter: "DiscoveryOtherCloudFilter" = proto.Field( + proto.MESSAGE, + number=2, + message="DiscoveryOtherCloudFilter", + ) + conditions: "DiscoveryOtherCloudConditions" = proto.Field( + proto.MESSAGE, + number=3, + message="DiscoveryOtherCloudConditions", + ) + generation_cadence: "DiscoveryOtherCloudGenerationCadence" = proto.Field( + proto.MESSAGE, + number=4, + oneof="cadence", + message="DiscoveryOtherCloudGenerationCadence", + ) + disabled: "Disabled" = proto.Field( + proto.MESSAGE, + number=5, + oneof="cadence", + message="Disabled", + ) + + +class DiscoveryOtherCloudFilter(proto.Message): + r"""Determines which resources from the other cloud will have + profiles generated. Includes the ability to filter by resource + names. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + collection (google.cloud.dlp_v2.types.OtherCloudResourceCollection): + A collection of resources for this filter to + apply to. + + This field is a member of `oneof`_ ``filter``. + single_resource (google.cloud.dlp_v2.types.OtherCloudSingleResourceReference): + The resource to scan. Configs using this + filter can only have one target (the target with + this single resource reference). + + This field is a member of `oneof`_ ``filter``. + others (google.cloud.dlp_v2.types.AllOtherResources): + Optional. Catch-all. This should always be + the last target in the list because anything + above it will apply first. Should only appear + once in a configuration. If none is specified, a + default one will be added automatically. 
+ + This field is a member of `oneof`_ ``filter``. + """ + + collection: "OtherCloudResourceCollection" = proto.Field( + proto.MESSAGE, + number=1, + oneof="filter", + message="OtherCloudResourceCollection", + ) + single_resource: "OtherCloudSingleResourceReference" = proto.Field( + proto.MESSAGE, + number=2, + oneof="filter", + message="OtherCloudSingleResourceReference", + ) + others: "AllOtherResources" = proto.Field( + proto.MESSAGE, + number=100, + oneof="filter", + message="AllOtherResources", + ) + + +class OtherCloudResourceCollection(proto.Message): + r"""Match resources using regex filters. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + include_regexes (google.cloud.dlp_v2.types.OtherCloudResourceRegexes): + A collection of regular expressions to match + a resource against. + + This field is a member of `oneof`_ ``pattern``. + """ + + include_regexes: "OtherCloudResourceRegexes" = proto.Field( + proto.MESSAGE, + number=1, + oneof="pattern", + message="OtherCloudResourceRegexes", + ) + + +class OtherCloudResourceRegexes(proto.Message): + r"""A collection of regular expressions to determine what + resources to match against. + + Attributes: + patterns (MutableSequence[google.cloud.dlp_v2.types.OtherCloudResourceRegex]): + A group of regular expression patterns to + match against one or more resources. + Maximum of 100 entries. The sum of all regular + expression's length can't exceed 10 KiB. + """ + + patterns: MutableSequence["OtherCloudResourceRegex"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="OtherCloudResourceRegex", + ) + + +class OtherCloudResourceRegex(proto.Message): + r"""A pattern to match against one or more resources. At least one + pattern must be specified. Regular expressions use RE2 + `syntax `__; a guide can + be found under the google/re2 repository on GitHub. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + amazon_s3_bucket_regex (google.cloud.dlp_v2.types.AmazonS3BucketRegex): + Regex for Amazon S3 buckets. + + This field is a member of `oneof`_ ``resource_regex``. + """ + + amazon_s3_bucket_regex: "AmazonS3BucketRegex" = proto.Field( + proto.MESSAGE, + number=1, + oneof="resource_regex", + message="AmazonS3BucketRegex", + ) + + +class AwsAccountRegex(proto.Message): + r"""AWS account regex. + + Attributes: + account_id_regex (str): + Optional. Regex to test the AWS account ID + against. If empty, all accounts match. + """ + + account_id_regex: str = proto.Field( + proto.STRING, + number=1, + ) + + +class AmazonS3BucketRegex(proto.Message): + r"""Amazon S3 bucket regex. + + Attributes: + aws_account_regex (google.cloud.dlp_v2.types.AwsAccountRegex): + The AWS account regex. + bucket_name_regex (str): + Optional. Regex to test the bucket name + against. If empty, all buckets match. + """ + + aws_account_regex: "AwsAccountRegex" = proto.Field( + proto.MESSAGE, + number=1, + message="AwsAccountRegex", + ) + bucket_name_regex: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OtherCloudSingleResourceReference(proto.Message): + r"""Identifies a single resource, like a single Amazon S3 bucket. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + amazon_s3_bucket (google.cloud.dlp_v2.types.AmazonS3Bucket): + Amazon S3 bucket. + + This field is a member of `oneof`_ ``resource``. + """ + + amazon_s3_bucket: "AmazonS3Bucket" = proto.Field( + proto.MESSAGE, + number=1, + oneof="resource", + message="AmazonS3Bucket", + ) + + +class AwsAccount(proto.Message): + r"""AWS account. + + Attributes: + account_id (str): + Required. AWS account ID. 
+ """ + + account_id: str = proto.Field( + proto.STRING, + number=1, + ) + + +class AmazonS3Bucket(proto.Message): + r"""Amazon S3 bucket. + + Attributes: + aws_account (google.cloud.dlp_v2.types.AwsAccount): + The AWS account. + bucket_name (str): + Required. The bucket name. + """ + + aws_account: "AwsAccount" = proto.Field( + proto.MESSAGE, + number=1, + message="AwsAccount", + ) + bucket_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DiscoveryOtherCloudConditions(proto.Message): + r"""Requirements that must be true before a resource is profiled + for the first time. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + min_age (google.protobuf.duration_pb2.Duration): + Minimum age a resource must be before Cloud + DLP can profile it. Value must be 1 hour or + greater. + amazon_s3_bucket_conditions (google.cloud.dlp_v2.types.AmazonS3BucketConditions): + Amazon S3 bucket conditions. + + This field is a member of `oneof`_ ``conditions``. + """ + + min_age: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + amazon_s3_bucket_conditions: "AmazonS3BucketConditions" = proto.Field( + proto.MESSAGE, + number=2, + oneof="conditions", + message="AmazonS3BucketConditions", + ) + + +class AmazonS3BucketConditions(proto.Message): + r"""Amazon S3 bucket conditions. + + Attributes: + bucket_types (MutableSequence[google.cloud.dlp_v2.types.AmazonS3BucketConditions.BucketType]): + Optional. Bucket types that should be profiled. Optional. + Defaults to TYPE_ALL_SUPPORTED if unspecified. + object_storage_classes (MutableSequence[google.cloud.dlp_v2.types.AmazonS3BucketConditions.ObjectStorageClass]): + Optional. Object classes that should be profiled. Optional. + Defaults to ALL_SUPPORTED_CLASSES if unspecified. + """ + + class BucketType(proto.Enum): + r"""Supported Amazon S3 bucket types. Defaults to TYPE_ALL_SUPPORTED. 
+ + Values: + TYPE_UNSPECIFIED (0): + Unused. + TYPE_ALL_SUPPORTED (1): + All supported classes. + TYPE_GENERAL_PURPOSE (2): + A general purpose Amazon S3 bucket. + """ + TYPE_UNSPECIFIED = 0 + TYPE_ALL_SUPPORTED = 1 + TYPE_GENERAL_PURPOSE = 2 + + class ObjectStorageClass(proto.Enum): + r"""Supported Amazon S3 object storage classes. Defaults to + ALL_SUPPORTED_CLASSES. + + Values: + UNSPECIFIED (0): + Unused. + ALL_SUPPORTED_CLASSES (1): + All supported classes. + STANDARD (2): + Standard object class. + STANDARD_INFREQUENT_ACCESS (4): + Standard - infrequent access object class. + GLACIER_INSTANT_RETRIEVAL (6): + Glacier - instant retrieval object class. + INTELLIGENT_TIERING (7): + Objects in the S3 Intelligent-Tiering access + tiers. + """ + UNSPECIFIED = 0 + ALL_SUPPORTED_CLASSES = 1 + STANDARD = 2 + STANDARD_INFREQUENT_ACCESS = 4 + GLACIER_INSTANT_RETRIEVAL = 6 + INTELLIGENT_TIERING = 7 + + bucket_types: MutableSequence[BucketType] = proto.RepeatedField( + proto.ENUM, + number=1, + enum=BucketType, + ) + object_storage_classes: MutableSequence[ObjectStorageClass] = proto.RepeatedField( + proto.ENUM, + number=2, + enum=ObjectStorageClass, + ) + + +class DiscoveryOtherCloudGenerationCadence(proto.Message): + r"""How often existing resources should have their profiles + refreshed. New resources are scanned as quickly as possible + depending on system capacity. + + Attributes: + refresh_frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): + Optional. Frequency to update profiles + regardless of whether the underlying resource + has changes. Defaults to never. + inspect_template_modified_cadence (google.cloud.dlp_v2.types.DiscoveryInspectTemplateModifiedCadence): + Optional. Governs when to update data profiles when the + inspection rules defined by the ``InspectTemplate`` change. + If not set, changing the template will not cause a data + profile to update. 
+ """ + + refresh_frequency: "DataProfileUpdateFrequency" = proto.Field( + proto.ENUM, + number=1, + enum="DataProfileUpdateFrequency", + ) + inspect_template_modified_cadence: "DiscoveryInspectTemplateModifiedCadence" = ( + proto.Field( + proto.MESSAGE, + number=2, + message="DiscoveryInspectTemplateModifiedCadence", + ) + ) + + class DiscoveryStartingLocation(proto.Message): r"""The location to begin a discovery scan. Denotes an organization ID or folder ID within an organization. @@ -9627,6 +10111,62 @@ class DiscoveryStartingLocation(proto.Message): ) +class OtherCloudDiscoveryStartingLocation(proto.Message): + r"""The other cloud starting location for discovery. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + aws_location (google.cloud.dlp_v2.types.OtherCloudDiscoveryStartingLocation.AwsDiscoveryStartingLocation): + The AWS starting location for discovery. + + This field is a member of `oneof`_ ``location``. + """ + + class AwsDiscoveryStartingLocation(proto.Message): + r"""The AWS starting location for discovery. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + account_id (str): + The AWS account ID that this discovery config applies to. + Within an AWS organization, you can find the AWS account ID + inside an AWS account ARN. Example: + arn:{partition}:organizations::{management_account_id}:account/{org_id}/{account_id} + + This field is a member of `oneof`_ ``scope``. + all_asset_inventory_assets (bool): + All AWS assets stored in Asset Inventory that + didn't match other AWS discovery configs. + + This field is a member of `oneof`_ ``scope``. 
+ """ + + account_id: str = proto.Field( + proto.STRING, + number=2, + oneof="scope", + ) + all_asset_inventory_assets: bool = proto.Field( + proto.BOOL, + number=3, + oneof="scope", + ) + + aws_location: AwsDiscoveryStartingLocation = proto.Field( + proto.MESSAGE, + number=1, + oneof="location", + message=AwsDiscoveryStartingLocation, + ) + + class AllOtherResources(proto.Message): r"""Match discovery resources not covered by any other filter.""" @@ -11260,7 +11800,7 @@ class ProjectDataProfile(proto.Message): name (str): The resource name of the profile. project_id (str): - Project ID that was profiled. + Project ID or account that was profiled. profile_last_generated (google.protobuf.timestamp_pb2.Timestamp): The last time the profile was generated. sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): @@ -11918,6 +12458,7 @@ class FileStoreDataProfile(proto.Message): r"""The profile for a file store. - Cloud Storage: maps 1:1 with a bucket. + - Amazon S3: maps 1:1 with a bucket. Attributes: name (str): @@ -11929,12 +12470,15 @@ class FileStoreDataProfile(proto.Message): for this file store. project_id (str): The Google Cloud project ID that owns the - resource. + resource. For Amazon S3 buckets, this is the AWS + Account Id. file_store_location (str): The location of the file store. - Cloud Storage: https://cloud.google.com/storage/docs/locations#available-locations + - Amazon S3: + https://docs.aws.amazon.com/general/latest/gr/rande.html#regional-endpoints data_storage_locations (MutableSequence[str]): For resources that have multiple storage locations, these are those regions. For Cloud Storage this is the list of @@ -11951,9 +12495,13 @@ class FileStoreDataProfile(proto.Message): The file store path. - Cloud Storage: ``gs://{bucket}`` + - Amazon S3: ``s3://{bucket}`` full_resource (str): The resource name of the resource profiled. 
https://cloud.google.com/apis/design/resource_names#full_resource_name + + Example format of an S3 bucket full resource name: + ``//cloudasset.googleapis.com/organizations/{org_id}/otherCloudConnections/aws/arn:aws:s3:::{bucket_name}`` config_snapshot (google.cloud.dlp_v2.types.DataProfileConfigSnapshot): The snapshot of the configurations used to generate the profile. @@ -12322,6 +12870,7 @@ class ListFileStoreDataProfilesRequest(proto.Message): - Supported fields/values: - ``project_id`` - The Google Cloud project ID. + - ``account_id`` - The AWS account ID. - ``file_store_path`` - The path like "gs://bucket". - ``data_source_type`` - The profile's data source type, like "google/storage/bucket". @@ -13003,9 +13552,13 @@ class DataSourceType(proto.Message): Attributes: data_source (str): - Output only. An identifying string to the - type of resource being profiled. Current values: - google/bigquery/table, google/project + Output only. An identifying string to the type of resource + being profiled. 
Current values: + + - google/bigquery/table + - google/project + - google/sql/table + - google/gcs/bucket """ data_source: str = proto.Field( diff --git a/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json b/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json index cb1c758e62fc..4da85d5c6cd9 100644 --- a/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json +++ b/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dlp", - "version": "3.22.0" + "version": "0.1.0" }, "snippets": [ { From e889809389c5b194ec77955664eb2859cde28d73 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 19:36:04 -0400 Subject: [PATCH 30/59] feat: [google-cloud-build] Add LEGACY_BUCKET option to DefaultLogsBucketBehavior (#13099) BEGIN_COMMIT_OVERRIDE feat: Add LEGACY_BUCKET option to DefaultLogsBucketBehavior docs: Sanitize docs END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
docs: Sanitize docs PiperOrigin-RevId: 677021009 Source-Link: https://github.com/googleapis/googleapis/commit/a18d9b2c3563527b26c4b713469e795b92795271 Source-Link: https://github.com/googleapis/googleapis-gen/commit/09d68f35365c74ad276cea3e7c26553a1485faa0 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJ1aWxkLy5Pd2xCb3QueWFtbCIsImgiOiIwOWQ2OGYzNTM2NWM3NGFkMjc2Y2VhM2U3YzI2NTUzYTE0ODVmYWEwIn0= --------- Co-authored-by: Owl Bot --- .../cloud/devtools/cloudbuild_v1/types/cloudbuild.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py index a10715d0ffdf..995ae202614c 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py @@ -3278,7 +3278,7 @@ class LoggingMode(proto.Enum): NONE = 4 class DefaultLogsBucketBehavior(proto.Enum): - r"""Default GCS log bucket behavior options. + r"""Default Cloud Storage log bucket behavior options. Values: DEFAULT_LOGS_BUCKET_BEHAVIOR_UNSPECIFIED (0): @@ -3287,10 +3287,15 @@ class DefaultLogsBucketBehavior(proto.Enum): Bucket is located in user-owned project in the same region as the build. The builder service account must have access to create and - write to GCS buckets in the build project. + write to Cloud Storage buckets in the build + project. + LEGACY_BUCKET (2): + Bucket is located in a Google-owned project + and is not regionalized. """ DEFAULT_LOGS_BUCKET_BEHAVIOR_UNSPECIFIED = 0 REGIONAL_USER_OWNED_BUCKET = 1 + LEGACY_BUCKET = 2 class PoolOption(proto.Message): r"""Details about how a build should be executed on a ``WorkerPool``. 
From d6238e49a17caf54dd0fbc45215527beed057cc5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 19:44:15 -0400 Subject: [PATCH 31/59] feat: [google-ai-generativelanguage] Add GoogleSearchRetrieval tool and candidate.grounding_metadata (#13098) BEGIN_COMMIT_OVERRIDE feat: Add GoogleSearchRetrieval tool and candidate.grounding_metadata feat: Add Schema.min_items feat: Add GenerationConfig.{presence_penalty, frequency_penalty, logprobs, response_logprobs, logprobs} and Candidate.{avg_logprobs, logprobs_result} feat: Add PredictionService (for Imagen) feat: Add HarmCategory.HARM_CATEGORY_CIVIC_INTEGRITY feat: Add HarmBlockThreshold.OFF feat: Add TunedModels.reader_project_numbers docs: Small fixes docs: Tag HarmCategories by the model family they're used on. END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. feat: Add Schema.min_items feat: Add GenerationConfig.{presence_penalty, frequency_penalty, logprobs, response_logprobs, logprobs} and Candidate.{avg_logprobs, logprobs_result} feat: Add PredictionService (for Imagen) feat: Add HarmCategory.HARM_CATEGORY_CIVIC_INTEGRITY feat: Add HarmBlockThreshold.OFF feat: Add TunedModels.reader_project_numbers docs: Small fixes docs: Tag HarmCategories by the model family they're used on. 
PiperOrigin-RevId: 676982731 Source-Link: https://github.com/googleapis/googleapis/commit/979f71cfaea54d9bc03543647da4392f052c801e Source-Link: https://github.com/googleapis/googleapis-gen/commit/f5e280f1d6258abe58a0b910102e97ce8d82d948 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFpLWdlbmVyYXRpdmVsYW5ndWFnZS8uT3dsQm90LnlhbWwiLCJoIjoiZjVlMjgwZjFkNjI1OGFiZTU4YTBiOTEwMTAyZTk3Y2U4ZDgyZDk0OCJ9 BEGIN_NESTED_COMMIT feat: [google-ai-generativelanguage] Add GenerationConfig.{presence_penalty, frequency_penalty, logprobs, response_logprobs, logprobs} and Candidate.{avg_logprobs, logprobs_result} feat: Add HarmCategory.HARM_CATEGORY_CIVIC_INTEGRITY docs: Tag HarmCategories by the model family they're used on. feat: Add HarmBlockThreshold.OFF docs: Small fixes PiperOrigin-RevId: 676980561 Source-Link: https://github.com/googleapis/googleapis/commit/1cb097eee9a3f9210911063649eb0d2c3b023596 Source-Link: https://github.com/googleapis/googleapis-gen/commit/04d3969939b2bf4e3db9f1aef88d18a9bedc8ae7 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFpLWdlbmVyYXRpdmVsYW5ndWFnZS8uT3dsQm90LnlhbWwiLCJoIjoiMDRkMzk2OTkzOWIyYmY0ZTNkYjlmMWFlZjg4ZDE4YTliZWRjOGFlNyJ9 END_NESTED_COMMIT --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../prediction_service.rst | 6 + .../generativelanguage_v1beta/services_.rst | 1 + .../google/ai/generativelanguage/__init__.py | 32 + .../ai/generativelanguage/gapic_version.py | 2 +- .../ai/generativelanguage_v1/__init__.py | 2 + .../ai/generativelanguage_v1/gapic_version.py | 2 +- .../generative_service/transports/rest.py | 4 + .../services/model_service/transports/rest.py | 4 + .../generativelanguage_v1/types/__init__.py | 2 + .../types/generative_service.py | 161 + .../ai/generativelanguage_v1/types/safety.py | 37 +- .../ai/generativelanguage_v1beta/__init__.py | 27 + .../gapic_metadata.json | 34 + .../gapic_version.py | 2 +- .../services/prediction_service/__init__.py | 22 + .../prediction_service/async_client.py | 391 +++ 
.../services/prediction_service/client.py | 814 ++++++ .../prediction_service/transports/__init__.py | 36 + .../prediction_service/transports/base.py | 165 ++ .../prediction_service/transports/grpc.py | 274 ++ .../transports/grpc_asyncio.py | 285 ++ .../prediction_service/transports/rest.py | 313 ++ .../types/__init__.py | 21 + .../types/content.py | 93 +- .../types/generative_service.py | 394 ++- .../types/prediction_service.py | 79 + .../generativelanguage_v1beta/types/safety.py | 37 +- .../types/tuned_model.py | 7 + .../gapic_version.py | 2 +- .../gapic_version.py | 2 +- ...erated_prediction_service_predict_async.py | 56 + ...nerated_prediction_service_predict_sync.py | 56 + ...adata_google.ai.generativelanguage.v1.json | 2 +- ...a_google.ai.generativelanguage.v1beta.json | 171 +- ..._google.ai.generativelanguage.v1beta2.json | 2 +- ..._google.ai.generativelanguage.v1beta3.json | 2 +- ...ixup_generativelanguage_v1beta_keywords.py | 1 + .../test_cache_service.py | 8 + .../test_model_service.py | 16 + .../test_prediction_service.py | 2586 +++++++++++++++++ 40 files changed, 6100 insertions(+), 51 deletions(-) create mode 100644 packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/prediction_service.rst create mode 100644 packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/__init__.py create mode 100644 packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/async_client.py create mode 100644 packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py create mode 100644 packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/__init__.py create mode 100644 packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/base.py create mode 100644 
packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc.py create mode 100644 packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc_asyncio.py create mode 100644 packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/rest.py create mode 100644 packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/prediction_service.py create mode 100644 packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_async.py create mode 100644 packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_sync.py create mode 100644 packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/prediction_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/prediction_service.rst new file mode 100644 index 000000000000..7b2b932acacc --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/prediction_service.rst @@ -0,0 +1,6 @@ +PredictionService +----------------------------------- + +.. 
automodule:: google.ai.generativelanguage_v1beta.services.prediction_service + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst index 24e6184e8b0e..7a7b5429bd6f 100644 --- a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst @@ -9,5 +9,6 @@ Services for Google Ai Generativelanguage v1beta API generative_service model_service permission_service + prediction_service retriever_service text_service diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py index c69803d506d6..750b54051c3f 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py @@ -54,6 +54,12 @@ from google.ai.generativelanguage_v1beta.services.permission_service.client import ( PermissionServiceClient, ) +from google.ai.generativelanguage_v1beta.services.prediction_service.async_client import ( + PredictionServiceAsyncClient, +) +from google.ai.generativelanguage_v1beta.services.prediction_service.client import ( + PredictionServiceClient, +) from google.ai.generativelanguage_v1beta.services.retriever_service.async_client import ( RetrieverServiceAsyncClient, ) @@ -84,12 +90,14 @@ CodeExecution, CodeExecutionResult, Content, + DynamicRetrievalConfig, ExecutableCode, FileData, FunctionCall, FunctionCallingConfig, FunctionDeclaration, FunctionResponse, + GoogleSearchRetrieval, GroundingPassage, GroundingPassages, Part, @@ -132,6 +140,13 @@ GenerateContentResponse, GenerationConfig, GroundingAttribution, + GroundingChunk, + GroundingMetadata, + GroundingSupport, + LogprobsResult, + 
RetrievalMetadata, + SearchEntryPoint, + Segment, SemanticRetrieverConfig, TaskType, ) @@ -159,6 +174,10 @@ TransferOwnershipResponse, UpdatePermissionRequest, ) +from google.ai.generativelanguage_v1beta.types.prediction_service import ( + PredictRequest, + PredictResponse, +) from google.ai.generativelanguage_v1beta.types.retriever import ( Chunk, ChunkData, @@ -243,6 +262,8 @@ "ModelServiceAsyncClient", "PermissionServiceClient", "PermissionServiceAsyncClient", + "PredictionServiceClient", + "PredictionServiceAsyncClient", "RetrieverServiceClient", "RetrieverServiceAsyncClient", "TextServiceClient", @@ -260,12 +281,14 @@ "CodeExecution", "CodeExecutionResult", "Content", + "DynamicRetrievalConfig", "ExecutableCode", "FileData", "FunctionCall", "FunctionCallingConfig", "FunctionDeclaration", "FunctionResponse", + "GoogleSearchRetrieval", "GroundingPassage", "GroundingPassages", "Part", @@ -303,6 +326,13 @@ "GenerateContentResponse", "GenerationConfig", "GroundingAttribution", + "GroundingChunk", + "GroundingMetadata", + "GroundingSupport", + "LogprobsResult", + "RetrievalMetadata", + "SearchEntryPoint", + "Segment", "SemanticRetrieverConfig", "TaskType", "Model", @@ -325,6 +355,8 @@ "TransferOwnershipRequest", "TransferOwnershipResponse", "UpdatePermissionRequest", + "PredictRequest", + "PredictResponse", "Chunk", "ChunkData", "Condition", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py index 1699c98da708..558c8aab67c5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/__init__.py index 4c8665b1b49a..a383f98f4342 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/__init__.py @@ -37,6 +37,7 @@ GenerateContentRequest, GenerateContentResponse, GenerationConfig, + LogprobsResult, TaskType, ) from .types.model import Model @@ -66,6 +67,7 @@ "HarmCategory", "ListModelsRequest", "ListModelsResponse", + "LogprobsResult", "Model", "ModelServiceClient", "Part", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py index 1699c98da708..558c8aab67c5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py index 7498cba8d7e3..7caa772eb19f 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py @@ -1057,6 +1057,10 @@ def __call__( "method": "get", "uri": "/v1/{name=tunedModels/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=generatedFiles/*}/operations/*", + }, ] request, metadata = self._interceptor.pre_get_operation(request, metadata) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py index e21bd17d6e8b..a431622cc869 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py @@ -572,6 +572,10 @@ def __call__( "method": "get", "uri": "/v1/{name=tunedModels/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=generatedFiles/*}/operations/*", + }, ] request, metadata = self._interceptor.pre_get_operation(request, metadata) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/__init__.py index 522ecb07c1c3..9156b856ee0e 100644 --- 
a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/__init__.py @@ -27,6 +27,7 @@ GenerateContentRequest, GenerateContentResponse, GenerationConfig, + LogprobsResult, TaskType, ) from .model import Model @@ -50,6 +51,7 @@ "GenerateContentRequest", "GenerateContentResponse", "GenerationConfig", + "LogprobsResult", "TaskType", "Model", "GetModelRequest", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py index e19c5b166abc..e8062906bfbf 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py @@ -31,6 +31,7 @@ "GenerationConfig", "GenerateContentResponse", "Candidate", + "LogprobsResult", "EmbedContentRequest", "ContentEmbedding", "EmbedContentResponse", @@ -228,6 +229,58 @@ class GenerationConfig(proto.Message): doesn't allow setting ``top_k`` on requests. This field is a member of `oneof`_ ``_top_k``. + presence_penalty (float): + Optional. Presence penalty applied to the next token's + logprobs if the token has already been seen in the response. + + This penalty is binary on/off and not dependant on the + number of times the token is used (after the first). Use + [frequency_penalty][google.ai.generativelanguage.v1.GenerationConfig.frequency_penalty] + for a penalty that increases with each use. + + A positive penalty will discourage the use of tokens that + have already been used in the response, increasing the + vocabulary. + + A negative penalty will encourage the use of tokens that + have already been used in the response, decreasing the + vocabulary. + + This field is a member of `oneof`_ ``_presence_penalty``. 
+ frequency_penalty (float): + Optional. Frequency penalty applied to the next token's + logprobs, multiplied by the number of times each token has + been seen in the respponse so far. + + A positive penalty will discourage the use of tokens that + have already been used, proportional to the number of times + the token has been used: The more a token is used, the more + dificult it is for the model to use that token again + increasing the vocabulary of responses. + + Caution: A *negative* penalty will encourage the model to + reuse tokens proportional to the number of times the token + has been used. Small negative values will reduce the + vocabulary of a response. Larger negative values will cause + the model to start repeating a common token until it hits + the + [max_output_tokens][google.ai.generativelanguage.v1.GenerationConfig.max_output_tokens] + limit: "...the the the the the...". + + This field is a member of `oneof`_ ``_frequency_penalty``. + response_logprobs (bool): + Optional. If true, export the logprobs + results in response. + + This field is a member of `oneof`_ ``_response_logprobs``. + logprobs (int): + Optional. Only valid if + [response_logprobs=True][google.ai.generativelanguage.v1.GenerationConfig.response_logprobs]. + This sets the number of top logprobs to return at each + decoding step in the + [Candidate.logprobs_result][google.ai.generativelanguage.v1.Candidate.logprobs_result]. + + This field is a member of `oneof`_ ``_logprobs``. 
""" candidate_count: int = proto.Field( @@ -259,6 +312,26 @@ class GenerationConfig(proto.Message): number=7, optional=True, ) + presence_penalty: float = proto.Field( + proto.FLOAT, + number=15, + optional=True, + ) + frequency_penalty: float = proto.Field( + proto.FLOAT, + number=16, + optional=True, + ) + response_logprobs: bool = proto.Field( + proto.BOOL, + number=17, + optional=True, + ) + logprobs: int = proto.Field( + proto.INT32, + number=18, + optional=True, + ) class GenerateContentResponse(proto.Message): @@ -414,6 +487,11 @@ class Candidate(proto.Message): foundational LLM's training data. token_count (int): Output only. Token count for this candidate. + avg_logprobs (float): + Output only. + logprobs_result (google.ai.generativelanguage_v1.types.LogprobsResult): + Output only. Log-likelihood scores for the + response tokens and top tokens """ class FinishReason(proto.Enum): @@ -494,6 +572,89 @@ class FinishReason(proto.Enum): proto.INT32, number=7, ) + avg_logprobs: float = proto.Field( + proto.DOUBLE, + number=10, + ) + logprobs_result: "LogprobsResult" = proto.Field( + proto.MESSAGE, + number=11, + message="LogprobsResult", + ) + + +class LogprobsResult(proto.Message): + r"""Logprobs Result + + Attributes: + top_candidates (MutableSequence[google.ai.generativelanguage_v1.types.LogprobsResult.TopCandidates]): + Length = total number of decoding steps. + chosen_candidates (MutableSequence[google.ai.generativelanguage_v1.types.LogprobsResult.Candidate]): + Length = total number of decoding steps. The chosen + candidates may or may not be in top_candidates. + """ + + class Candidate(proto.Message): + r"""Candidate for the logprobs token and score. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + token (str): + The candidate’s token string value. + + This field is a member of `oneof`_ ``_token``. + token_id (int): + The candidate’s token id value. 
+ + This field is a member of `oneof`_ ``_token_id``. + log_probability (float): + The candidate's log probability. + + This field is a member of `oneof`_ ``_log_probability``. + """ + + token: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + token_id: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + log_probability: float = proto.Field( + proto.FLOAT, + number=2, + optional=True, + ) + + class TopCandidates(proto.Message): + r"""Candidates with top log probabilities at each decoding step. + + Attributes: + candidates (MutableSequence[google.ai.generativelanguage_v1.types.LogprobsResult.Candidate]): + Sorted by log probability in descending + order. + """ + + candidates: MutableSequence["LogprobsResult.Candidate"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LogprobsResult.Candidate", + ) + + top_candidates: MutableSequence[TopCandidates] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=TopCandidates, + ) + chosen_candidates: MutableSequence[Candidate] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Candidate, + ) class EmbedContentRequest(proto.Message): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py index 100fc75977da..2a75fd715410 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py @@ -39,31 +39,32 @@ class HarmCategory(proto.Enum): HARM_CATEGORY_UNSPECIFIED (0): Category is unspecified. HARM_CATEGORY_DEROGATORY (1): - Negative or harmful comments targeting - identity and/or protected attribute. + **PaLM** - Negative or harmful comments targeting identity + and/or protected attribute. HARM_CATEGORY_TOXICITY (2): - Content that is rude, disrespectful, or - profane. 
+ **PaLM** - Content that is rude, disrespectful, or profane. HARM_CATEGORY_VIOLENCE (3): - Describes scenarios depicting violence - against an individual or group, or general - descriptions of gore. + **PaLM** - Describes scenarios depicting violence against an + individual or group, or general descriptions of gore. HARM_CATEGORY_SEXUAL (4): - Contains references to sexual acts or other - lewd content. + **PaLM** - Contains references to sexual acts or other lewd + content. HARM_CATEGORY_MEDICAL (5): - Promotes unchecked medical advice. + **PaLM** - Promotes unchecked medical advice. HARM_CATEGORY_DANGEROUS (6): - Dangerous content that promotes, facilitates, - or encourages harmful acts. + **PaLM** - Dangerous content that promotes, facilitates, or + encourages harmful acts. HARM_CATEGORY_HARASSMENT (7): - Harasment content. + **Gemini** - Harassment content. HARM_CATEGORY_HATE_SPEECH (8): - Hate speech and content. + **Gemini** - Hate speech and content. HARM_CATEGORY_SEXUALLY_EXPLICIT (9): - Sexually explicit content. + **Gemini** - Sexually explicit content. HARM_CATEGORY_DANGEROUS_CONTENT (10): - Dangerous content. + **Gemini** - Dangerous content. + HARM_CATEGORY_CIVIC_INTEGRITY (11): + **Gemini** - Content that may be used to harm civic + integrity. """ HARM_CATEGORY_UNSPECIFIED = 0 HARM_CATEGORY_DEROGATORY = 1 @@ -76,6 +77,7 @@ class HarmCategory(proto.Enum): HARM_CATEGORY_HATE_SPEECH = 8 HARM_CATEGORY_SEXUALLY_EXPLICIT = 9 HARM_CATEGORY_DANGEROUS_CONTENT = 10 + HARM_CATEGORY_CIVIC_INTEGRITY = 11 class SafetyRating(proto.Message): @@ -170,12 +172,15 @@ class HarmBlockThreshold(proto.Enum): be allowed. BLOCK_NONE (4): All content will be allowed. + OFF (5): + Turn off the safety filter. 
""" HARM_BLOCK_THRESHOLD_UNSPECIFIED = 0 BLOCK_LOW_AND_ABOVE = 1 BLOCK_MEDIUM_AND_ABOVE = 2 BLOCK_ONLY_HIGH = 3 BLOCK_NONE = 4 + OFF = 5 category: "HarmCategory" = proto.Field( proto.ENUM, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py index c692fa7725c9..73da8c53fefc 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py @@ -30,6 +30,10 @@ PermissionServiceAsyncClient, PermissionServiceClient, ) +from .services.prediction_service import ( + PredictionServiceAsyncClient, + PredictionServiceClient, +) from .services.retriever_service import ( RetrieverServiceAsyncClient, RetrieverServiceClient, @@ -50,12 +54,14 @@ CodeExecution, CodeExecutionResult, Content, + DynamicRetrievalConfig, ExecutableCode, FileData, FunctionCall, FunctionCallingConfig, FunctionDeclaration, FunctionResponse, + GoogleSearchRetrieval, GroundingPassage, GroundingPassages, Part, @@ -98,6 +104,13 @@ GenerateContentResponse, GenerationConfig, GroundingAttribution, + GroundingChunk, + GroundingMetadata, + GroundingSupport, + LogprobsResult, + RetrievalMetadata, + SearchEntryPoint, + Segment, SemanticRetrieverConfig, TaskType, ) @@ -125,6 +138,7 @@ TransferOwnershipResponse, UpdatePermissionRequest, ) +from .types.prediction_service import PredictRequest, PredictResponse from .types.retriever import ( Chunk, ChunkData, @@ -203,6 +217,7 @@ "GenerativeServiceAsyncClient", "ModelServiceAsyncClient", "PermissionServiceAsyncClient", + "PredictionServiceAsyncClient", "RetrieverServiceAsyncClient", "TextServiceAsyncClient", "AttributionSourceId", @@ -256,6 +271,7 @@ "DeleteTunedModelRequest", "DiscussServiceClient", "Document", + "DynamicRetrievalConfig", "EmbedContentRequest", "EmbedContentResponse", "EmbedTextRequest", @@ -288,9 
+304,13 @@ "GetModelRequest", "GetPermissionRequest", "GetTunedModelRequest", + "GoogleSearchRetrieval", "GroundingAttribution", + "GroundingChunk", + "GroundingMetadata", "GroundingPassage", "GroundingPassages", + "GroundingSupport", "HarmCategory", "Hyperparameters", "ListCachedContentsRequest", @@ -309,6 +329,7 @@ "ListPermissionsResponse", "ListTunedModelsRequest", "ListTunedModelsResponse", + "LogprobsResult", "Message", "MessagePrompt", "MetadataFilter", @@ -317,16 +338,22 @@ "Part", "Permission", "PermissionServiceClient", + "PredictRequest", + "PredictResponse", + "PredictionServiceClient", "QueryCorpusRequest", "QueryCorpusResponse", "QueryDocumentRequest", "QueryDocumentResponse", "RelevantChunk", + "RetrievalMetadata", "RetrieverServiceClient", "SafetyFeedback", "SafetyRating", "SafetySetting", "Schema", + "SearchEntryPoint", + "Segment", "SemanticRetrieverConfig", "StringList", "TaskType", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json index 24a3b2565007..7fd1909f6ca0 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json @@ -569,6 +569,40 @@ } } }, + "PredictionService": { + "clients": { + "grpc": { + "libraryClient": "PredictionServiceClient", + "rpcs": { + "Predict": { + "methods": [ + "predict" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PredictionServiceAsyncClient", + "rpcs": { + "Predict": { + "methods": [ + "predict" + ] + } + } + }, + "rest": { + "libraryClient": "PredictionServiceClient", + "rpcs": { + "Predict": { + "methods": [ + "predict" + ] + } + } + } + } + }, "RetrieverService": { "clients": { "grpc": { diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py 
b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py index 1699c98da708..558c8aab67c5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/__init__.py new file mode 100644 index 000000000000..6c64cf5ad1c0 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import PredictionServiceAsyncClient +from .client import PredictionServiceClient + +__all__ = ( + "PredictionServiceClient", + "PredictionServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/async_client.py new file mode 100644 index 000000000000..f9e04e3e2aea --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/async_client.py @@ -0,0 +1,391 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .client import PredictionServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, PredictionServiceTransport +from .transports.grpc_asyncio import PredictionServiceGrpcAsyncIOTransport + + +class PredictionServiceAsyncClient: + """A service for online predictions and explanations.""" + + _client: PredictionServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = PredictionServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PredictionServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = PredictionServiceClient._DEFAULT_UNIVERSE + + model_path = staticmethod(PredictionServiceClient.model_path) + parse_model_path = staticmethod(PredictionServiceClient.parse_model_path) + common_billing_account_path = staticmethod( + PredictionServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + PredictionServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(PredictionServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + PredictionServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + PredictionServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + PredictionServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(PredictionServiceClient.common_project_path) + parse_common_project_path = staticmethod( + PredictionServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(PredictionServiceClient.common_location_path) + parse_common_location_path = staticmethod( + PredictionServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceAsyncClient: The constructed client. 
+ """ + return PredictionServiceClient.from_service_account_info.__func__(PredictionServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceAsyncClient: The constructed client. + """ + return PredictionServiceClient.from_service_account_file.__func__(PredictionServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return PredictionServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> PredictionServiceTransport: + """Returns the transport used by the client instance. + + Returns: + PredictionServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = PredictionServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + PredictionServiceTransport, + Callable[..., PredictionServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the prediction service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,PredictionServiceTransport,Callable[..., PredictionServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PredictionServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = PredictionServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def predict( + self, + request: Optional[Union[prediction_service.PredictRequest, dict]] = None, + *, + model: Optional[str] = None, + instances: Optional[MutableSequence[struct_pb2.Value]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> prediction_service.PredictResponse: + r"""Performs a prediction request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_predict(): + # Create a client + client = generativelanguage_v1beta.PredictionServiceAsyncClient() + + # Initialize request argument(s) + instances = generativelanguage_v1beta.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1beta.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = await client.predict(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.PredictRequest, dict]]): + The request object. Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1beta.PredictionService.Predict]. + model (:class:`str`): + Required. The name of the model for prediction. 
Format: + ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances (:class:`MutableSequence[google.protobuf.struct_pb2.Value]`): + Required. The instances that are the + input to the prediction call. + + This corresponds to the ``instances`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.PredictResponse: + Response message for [PredictionService.Predict]. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, instances]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, prediction_service.PredictRequest): + request = prediction_service.PredictRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if instances: + request.instances.extend(instances) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.predict] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "PredictionServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PredictionServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py new file mode 100644 index 000000000000..48736239098d --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py @@ -0,0 +1,814 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .transports.base import DEFAULT_CLIENT_INFO, PredictionServiceTransport +from .transports.grpc import PredictionServiceGrpcTransport +from .transports.grpc_asyncio import PredictionServiceGrpcAsyncIOTransport +from .transports.rest import PredictionServiceRestTransport + + +class PredictionServiceClientMeta(type): + """Metaclass for the PredictionService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[PredictionServiceTransport]] + _transport_registry["grpc"] = PredictionServiceGrpcTransport + _transport_registry["grpc_asyncio"] = PredictionServiceGrpcAsyncIOTransport + _transport_registry["rest"] = PredictionServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[PredictionServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class PredictionServiceClient(metaclass=PredictionServiceClientMeta): + """A service for online predictions and explanations.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = "generativelanguage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "generativelanguage.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PredictionServiceTransport: + """Returns the transport used by the client instance. + + Returns: + PredictionServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def model_path( + model: str, + ) -> str: + """Returns a fully-qualified model string.""" + return "models/{model}".format( + model=model, + ) + + @staticmethod + def parse_model_path(path: str) -> Dict[str, str]: + """Parses a model path into its component segments.""" + m = re.match(r"^models/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse 
a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. 
+ google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. 
+ use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = PredictionServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = PredictionServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = PredictionServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. 
+ + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = PredictionServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or PredictionServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. 
+ """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + PredictionServiceTransport, + Callable[..., PredictionServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the prediction service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,PredictionServiceTransport,Callable[..., PredictionServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PredictionServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = PredictionServiceClient._read_environment_variables() + self._client_cert_source = PredictionServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = PredictionServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. 
+ self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, PredictionServiceTransport) + if transport_provided: + # transport is a PredictionServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(PredictionServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or PredictionServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[PredictionServiceTransport], + Callable[..., PredictionServiceTransport], + ] = ( + PredictionServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., PredictionServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + 
scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def predict( + self, + request: Optional[Union[prediction_service.PredictRequest, dict]] = None, + *, + model: Optional[str] = None, + instances: Optional[MutableSequence[struct_pb2.Value]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> prediction_service.PredictResponse: + r"""Performs a prediction request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_predict(): + # Create a client + client = generativelanguage_v1beta.PredictionServiceClient() + + # Initialize request argument(s) + instances = generativelanguage_v1beta.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1beta.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = client.predict(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.PredictRequest, dict]): + The request object. Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1beta.PredictionService.Predict]. + model (str): + Required. The name of the model for prediction. Format: + ``name=models/{model}``. 
+ + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances (MutableSequence[google.protobuf.struct_pb2.Value]): + Required. The instances that are the + input to the prediction call. + + This corresponds to the ``instances`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.PredictResponse: + Response message for [PredictionService.Predict]. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, instances]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, prediction_service.PredictRequest): + request = prediction_service.PredictRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if instances is not None: + request.instances.extend(instances) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.predict] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "PredictionServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PredictionServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/__init__.py new file mode 100644 index 000000000000..d6d645ba1ff1 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PredictionServiceTransport +from .grpc import PredictionServiceGrpcTransport +from .grpc_asyncio import PredictionServiceGrpcAsyncIOTransport +from .rest import PredictionServiceRestInterceptor, PredictionServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[PredictionServiceTransport]] +_transport_registry["grpc"] = PredictionServiceGrpcTransport +_transport_registry["grpc_asyncio"] = PredictionServiceGrpcAsyncIOTransport +_transport_registry["rest"] = PredictionServiceRestTransport + +__all__ = ( + "PredictionServiceTransport", + "PredictionServiceGrpcTransport", + "PredictionServiceGrpcAsyncIOTransport", + "PredictionServiceRestTransport", + "PredictionServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/base.py new file mode 100644 index 000000000000..1b36658ad423 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/base.py @@ -0,0 +1,165 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version +from google.ai.generativelanguage_v1beta.types import prediction_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class PredictionServiceTransport(abc.ABC): + """Abstract transport class for PredictionService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.predict: gapic_v1.method.wrap_method( + self.predict, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def predict( + self, + ) -> Callable[ + [prediction_service.PredictRequest], + Union[ + prediction_service.PredictResponse, + Awaitable[prediction_service.PredictResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("PredictionServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc.py new file mode 100644 index 000000000000..285c2ff8af46 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc.py @@ -0,0 +1,274 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .base import DEFAULT_CLIENT_INFO, PredictionServiceTransport + + +class PredictionServiceGrpcTransport(PredictionServiceTransport): + """gRPC backend transport for PredictionService. + + A service for online predictions and explanations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. 
It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def predict( + self, + ) -> Callable[ + [prediction_service.PredictRequest], prediction_service.PredictResponse + ]: + r"""Return a callable for the predict method over gRPC. + + Performs a prediction request. + + Returns: + Callable[[~.PredictRequest], + ~.PredictResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "predict" not in self._stubs: + self._stubs["predict"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PredictionService/Predict", + request_serializer=prediction_service.PredictRequest.serialize, + response_deserializer=prediction_service.PredictResponse.deserialize, + ) + return self._stubs["predict"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("PredictionServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..1348f51f6706 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc_asyncio.py @@ -0,0 +1,285 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .base import DEFAULT_CLIENT_INFO, PredictionServiceTransport +from .grpc import PredictionServiceGrpcTransport + + +class PredictionServiceGrpcAsyncIOTransport(PredictionServiceTransport): + """gRPC AsyncIO backend transport for PredictionService. + + A service for online predictions and explanations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def predict( + self, + ) -> Callable[ + [prediction_service.PredictRequest], + Awaitable[prediction_service.PredictResponse], + ]: + r"""Return a callable for the predict method over gRPC. + + Performs a prediction request. + + Returns: + Callable[[~.PredictRequest], + Awaitable[~.PredictResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "predict" not in self._stubs: + self._stubs["predict"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PredictionService/Predict", + request_serializer=prediction_service.PredictRequest.serialize, + response_deserializer=prediction_service.PredictResponse.deserialize, + ) + return self._stubs["predict"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.predict: gapic_v1.method_async.wrap_method( + self.predict, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("PredictionServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/rest.py new file mode 100644 index 000000000000..0fd462caa988 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/rest.py @@ -0,0 +1,313 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import PredictionServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class PredictionServiceRestInterceptor: + """Interceptor for PredictionService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the PredictionServiceRestTransport. + + .. code-block:: python + class MyCustomPredictionServiceInterceptor(PredictionServiceRestInterceptor): + def pre_predict(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_predict(self, response): + logging.log(f"Received response: {response}") + return response + + transport = PredictionServiceRestTransport(interceptor=MyCustomPredictionServiceInterceptor()) + client = PredictionServiceClient(transport=transport) + + + """ + + def pre_predict( + self, + request: prediction_service.PredictRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[prediction_service.PredictRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for predict + + Override in a subclass to manipulate the request or metadata + before they are sent to the PredictionService server. + """ + return request, metadata + + def post_predict( + self, response: prediction_service.PredictResponse + ) -> prediction_service.PredictResponse: + """Post-rpc interceptor for predict + + Override in a subclass to manipulate the response + after it is returned by the PredictionService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class PredictionServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: PredictionServiceRestInterceptor + + +class PredictionServiceRestTransport(PredictionServiceTransport): + """REST backend transport for PredictionService. + + A service for online predictions and explanations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[PredictionServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or PredictionServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Predict(PredictionServiceRestStub): + def __hash__(self): + return hash("Predict") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: prediction_service.PredictRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> prediction_service.PredictResponse: + r"""Call the predict method over HTTP. + + Args: + request (~.prediction_service.PredictRequest): + The request object. 
Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1beta.PredictionService.Predict]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.prediction_service.PredictResponse: + Response message for [PredictionService.Predict]. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{model=models/*}:predict", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_predict(request, metadata) + pb_request = prediction_service.PredictRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = prediction_service.PredictResponse() + pb_resp = prediction_service.PredictResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_predict(resp) + return resp + + @property + def predict( + self, + ) -> Callable[ + [prediction_service.PredictRequest], prediction_service.PredictResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Predict(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("PredictionServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py index 89b4f8ad01b8..9dd7a564142d 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py @@ -28,12 +28,14 @@ CodeExecution, CodeExecutionResult, Content, + DynamicRetrievalConfig, ExecutableCode, FileData, FunctionCall, FunctionCallingConfig, FunctionDeclaration, FunctionResponse, + GoogleSearchRetrieval, GroundingPassage, GroundingPassages, Part, @@ -76,6 +78,13 @@ GenerateContentResponse, GenerationConfig, GroundingAttribution, + GroundingChunk, + GroundingMetadata, + GroundingSupport, + LogprobsResult, + RetrievalMetadata, + SearchEntryPoint, + Segment, SemanticRetrieverConfig, TaskType, ) @@ -103,6 +112,7 @@ TransferOwnershipResponse, UpdatePermissionRequest, ) +from .prediction_service import PredictRequest, PredictResponse from .retriever import ( Chunk, ChunkData, @@ 
-188,12 +198,14 @@ "CodeExecution", "CodeExecutionResult", "Content", + "DynamicRetrievalConfig", "ExecutableCode", "FileData", "FunctionCall", "FunctionCallingConfig", "FunctionDeclaration", "FunctionResponse", + "GoogleSearchRetrieval", "GroundingPassage", "GroundingPassages", "Part", @@ -231,6 +243,13 @@ "GenerateContentResponse", "GenerationConfig", "GroundingAttribution", + "GroundingChunk", + "GroundingMetadata", + "GroundingSupport", + "LogprobsResult", + "RetrievalMetadata", + "SearchEntryPoint", + "Segment", "SemanticRetrieverConfig", "TaskType", "Model", @@ -253,6 +272,8 @@ "TransferOwnershipRequest", "TransferOwnershipResponse", "UpdatePermissionRequest", + "PredictRequest", + "PredictResponse", "Chunk", "ChunkData", "Condition", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py index bbdbf7f24bc8..6b5d37cd15ce 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py @@ -31,6 +31,8 @@ "ExecutableCode", "CodeExecutionResult", "Tool", + "GoogleSearchRetrieval", + "DynamicRetrievalConfig", "CodeExecution", "ToolConfig", "FunctionCallingConfig", @@ -354,14 +356,18 @@ class Tool(proto.Message): The model or system does not execute the function. Instead the defined function may be returned as a - [FunctionCall][content.part.function_call] with arguments to - the client side for execution. The model may decide to call - a subset of these functions by populating - [FunctionCall][content.part.function_call] in the response. - The next conversation turn may contain a - [FunctionResponse][content.part.function_response] with the - [content.role] "function" generation context for the next - model turn. 
+ [FunctionCall][google.ai.generativelanguage.v1beta.Part.function_call] + with arguments to the client side for execution. The model + may decide to call a subset of these functions by populating + [FunctionCall][google.ai.generativelanguage.v1beta.Part.function_call] + in the response. The next conversation turn may contain a + [FunctionResponse][google.ai.generativelanguage.v1beta.Part.function_response] + with the + [Content.role][google.ai.generativelanguage.v1beta.Content.role] + "function" generation context for the next model turn. + google_search_retrieval (google.ai.generativelanguage_v1beta.types.GoogleSearchRetrieval): + Optional. Retrieval tool that is powered by + Google search. code_execution (google.ai.generativelanguage_v1beta.types.CodeExecution): Optional. Enables the model to execute code as part of generation. @@ -372,6 +378,11 @@ class Tool(proto.Message): number=1, message="FunctionDeclaration", ) + google_search_retrieval: "GoogleSearchRetrieval" = proto.Field( + proto.MESSAGE, + number=2, + message="GoogleSearchRetrieval", + ) code_execution: "CodeExecution" = proto.Field( proto.MESSAGE, number=3, @@ -379,6 +390,65 @@ class Tool(proto.Message): ) +class GoogleSearchRetrieval(proto.Message): + r"""Tool to retrieve public web data for grounding, powered by + Google. + + Attributes: + dynamic_retrieval_config (google.ai.generativelanguage_v1beta.types.DynamicRetrievalConfig): + Specifies the dynamic retrieval configuration + for the given source. + """ + + dynamic_retrieval_config: "DynamicRetrievalConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="DynamicRetrievalConfig", + ) + + +class DynamicRetrievalConfig(proto.Message): + r"""Describes the options to customize dynamic retrieval. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + mode (google.ai.generativelanguage_v1beta.types.DynamicRetrievalConfig.Mode): + The mode of the predictor to be used in + dynamic retrieval. + dynamic_threshold (float): + The threshold to be used in dynamic + retrieval. If not set, a system default value is + used. + + This field is a member of `oneof`_ ``_dynamic_threshold``. + """ + + class Mode(proto.Enum): + r"""The mode of the predictor to be used in dynamic retrieval. + + Values: + MODE_UNSPECIFIED (0): + Always trigger retrieval. + MODE_DYNAMIC (1): + Run retrieval only when system decides it is + necessary. + """ + MODE_UNSPECIFIED = 0 + MODE_DYNAMIC = 1 + + mode: Mode = proto.Field( + proto.ENUM, + number=1, + enum=Mode, + ) + dynamic_threshold: float = proto.Field( + proto.FLOAT, + number=2, + optional=True, + ) + + class CodeExecution(proto.Message): r"""Tool that executes code generated by the model, and automatically returns the result to the model. @@ -608,6 +678,9 @@ class Schema(proto.Message): max_items (int): Optional. Maximum number of the elements for Type.ARRAY. + min_items (int): + Optional. Minimum number of the elements for + Type.ARRAY. properties (MutableMapping[str, google.ai.generativelanguage_v1beta.types.Schema]): Optional. Properties of Type.OBJECT. 
required (MutableSequence[str]): @@ -645,6 +718,10 @@ class Schema(proto.Message): proto.INT64, number=21, ) + min_items: int = proto.Field( + proto.INT64, + number=22, + ) properties: MutableMapping[str, "Schema"] = proto.MapField( proto.STRING, proto.MESSAGE, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py index b31b07aa4299..edc4c8ec0ff8 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py @@ -32,8 +32,15 @@ "SemanticRetrieverConfig", "GenerateContentResponse", "Candidate", + "LogprobsResult", "AttributionSourceId", "GroundingAttribution", + "RetrievalMetadata", + "GroundingMetadata", + "SearchEntryPoint", + "GroundingChunk", + "Segment", + "GroundingSupport", "GenerateAnswerRequest", "GenerateAnswerResponse", "EmbedContentRequest", @@ -289,7 +296,8 @@ class GenerationConfig(proto.Message): Optional. MIME type of the generated candidate text. Supported MIME types are: ``text/plain``: (default) Text output. ``application/json``: JSON response in the response - candidates. Refer to the + candidates. ``text/x.enum``: ENUM as a string response in + the response candidates. Refer to the `docs `__ for a list of all supported text MIME types. response_schema (google.ai.generativelanguage_v1beta.types.Schema): @@ -303,6 +311,58 @@ class GenerationConfig(proto.Message): JSON response. Refer to the `JSON text generation guide `__ for more details. + presence_penalty (float): + Optional. Presence penalty applied to the next token's + logprobs if the token has already been seen in the response. + + This penalty is binary on/off and not dependant on the + number of times the token is used (after the first). 
Use + [frequency_penalty][google.ai.generativelanguage.v1beta.GenerationConfig.frequency_penalty] + for a penalty that increases with each use. + + A positive penalty will discourage the use of tokens that + have already been used in the response, increasing the + vocabulary. + + A negative penalty will encourage the use of tokens that + have already been used in the response, decreasing the + vocabulary. + + This field is a member of `oneof`_ ``_presence_penalty``. + frequency_penalty (float): + Optional. Frequency penalty applied to the next token's + logprobs, multiplied by the number of times each token has + been seen in the response so far. + + A positive penalty will discourage the use of tokens that + have already been used, proportional to the number of times + the token has been used: The more a token is used, the more + difficult it is for the model to use that token again + increasing the vocabulary of responses. + + Caution: A *negative* penalty will encourage the model to + reuse tokens proportional to the number of times the token + has been used. Small negative values will reduce the + vocabulary of a response. Larger negative values will cause + the model to start repeating a common token until it hits + the + [max_output_tokens][google.ai.generativelanguage.v1beta.GenerationConfig.max_output_tokens] + limit: "...the the the the the...". + + This field is a member of `oneof`_ ``_frequency_penalty``. + response_logprobs (bool): + Optional. If true, export the logprobs + results in response. + + This field is a member of `oneof`_ ``_response_logprobs``. + logprobs (int): + Optional. Only valid if + [response_logprobs=True][google.ai.generativelanguage.v1beta.GenerationConfig.response_logprobs]. + This sets the number of top logprobs to return at each + decoding step in the + [Candidate.logprobs_result][google.ai.generativelanguage.v1beta.Candidate.logprobs_result]. + + This field is a member of `oneof`_ ``_logprobs``. 
""" candidate_count: int = proto.Field( @@ -343,6 +403,26 @@ class GenerationConfig(proto.Message): number=14, message=gag_content.Schema, ) + presence_penalty: float = proto.Field( + proto.FLOAT, + number=15, + optional=True, + ) + frequency_penalty: float = proto.Field( + proto.FLOAT, + number=16, + optional=True, + ) + response_logprobs: bool = proto.Field( + proto.BOOL, + number=17, + optional=True, + ) + logprobs: int = proto.Field( + proto.INT32, + number=18, + optional=True, + ) class SemanticRetrieverConfig(proto.Message): @@ -565,6 +645,15 @@ class Candidate(proto.Message): contributed to a grounded answer. This field is populated for ``GenerateAnswer`` calls. + grounding_metadata (google.ai.generativelanguage_v1beta.types.GroundingMetadata): + Output only. Grounding metadata for the candidate. + + This field is populated for ``GenerateContent`` calls. + avg_logprobs (float): + Output only. + logprobs_result (google.ai.generativelanguage_v1beta.types.LogprobsResult): + Output only. Log-likelihood scores for the + response tokens and top tokens """ class FinishReason(proto.Enum): @@ -652,6 +741,94 @@ class FinishReason(proto.Enum): number=8, message="GroundingAttribution", ) + grounding_metadata: "GroundingMetadata" = proto.Field( + proto.MESSAGE, + number=9, + message="GroundingMetadata", + ) + avg_logprobs: float = proto.Field( + proto.DOUBLE, + number=10, + ) + logprobs_result: "LogprobsResult" = proto.Field( + proto.MESSAGE, + number=11, + message="LogprobsResult", + ) + + +class LogprobsResult(proto.Message): + r"""Logprobs Result + + Attributes: + top_candidates (MutableSequence[google.ai.generativelanguage_v1beta.types.LogprobsResult.TopCandidates]): + Length = total number of decoding steps. + chosen_candidates (MutableSequence[google.ai.generativelanguage_v1beta.types.LogprobsResult.Candidate]): + Length = total number of decoding steps. The chosen + candidates may or may not be in top_candidates. 
+ """ + + class Candidate(proto.Message): + r"""Candidate for the logprobs token and score. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + token (str): + The candidate’s token string value. + + This field is a member of `oneof`_ ``_token``. + token_id (int): + The candidate’s token id value. + + This field is a member of `oneof`_ ``_token_id``. + log_probability (float): + The candidate's log probability. + + This field is a member of `oneof`_ ``_log_probability``. + """ + + token: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + token_id: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + log_probability: float = proto.Field( + proto.FLOAT, + number=2, + optional=True, + ) + + class TopCandidates(proto.Message): + r"""Candidates with top log probabilities at each decoding step. + + Attributes: + candidates (MutableSequence[google.ai.generativelanguage_v1beta.types.LogprobsResult.Candidate]): + Sorted by log probability in descending + order. + """ + + candidates: MutableSequence["LogprobsResult.Candidate"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LogprobsResult.Candidate", + ) + + top_candidates: MutableSequence[TopCandidates] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=TopCandidates, + ) + chosen_candidates: MutableSequence[Candidate] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Candidate, + ) class AttributionSourceId(proto.Message): @@ -758,6 +935,219 @@ class GroundingAttribution(proto.Message): ) +class RetrievalMetadata(proto.Message): + r"""Metadata related to retrieval in the grounding flow. + + Attributes: + google_search_dynamic_retrieval_score (float): + Optional. Score indicating how likely information from + google search could help answer the prompt. The score is in + the range [0, 1], where 0 is the least likely and 1 is the + most likely. 
This score is only populated when google search + grounding and dynamic retrieval is enabled. It will be + compared to the threshold to determine whether to trigger + google search. + """ + + google_search_dynamic_retrieval_score: float = proto.Field( + proto.FLOAT, + number=2, + ) + + +class GroundingMetadata(proto.Message): + r"""Metadata returned to client when grounding is enabled. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + search_entry_point (google.ai.generativelanguage_v1beta.types.SearchEntryPoint): + Optional. Google search entry for the + following-up web searches. + + This field is a member of `oneof`_ ``_search_entry_point``. + grounding_chunks (MutableSequence[google.ai.generativelanguage_v1beta.types.GroundingChunk]): + List of supporting references retrieved from + specified grounding source. + grounding_supports (MutableSequence[google.ai.generativelanguage_v1beta.types.GroundingSupport]): + List of grounding support. + retrieval_metadata (google.ai.generativelanguage_v1beta.types.RetrievalMetadata): + Metadata related to retrieval in the + grounding flow. + + This field is a member of `oneof`_ ``_retrieval_metadata``. + """ + + search_entry_point: "SearchEntryPoint" = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message="SearchEntryPoint", + ) + grounding_chunks: MutableSequence["GroundingChunk"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="GroundingChunk", + ) + grounding_supports: MutableSequence["GroundingSupport"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="GroundingSupport", + ) + retrieval_metadata: "RetrievalMetadata" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="RetrievalMetadata", + ) + + +class SearchEntryPoint(proto.Message): + r"""Google search entry point. + + Attributes: + rendered_content (str): + Optional. 
Web content snippet that can be + embedded in a web page or an app webview. + sdk_blob (bytes): + Optional. Base64 encoded JSON representing + array of tuple. + """ + + rendered_content: str = proto.Field( + proto.STRING, + number=1, + ) + sdk_blob: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +class GroundingChunk(proto.Message): + r"""Grounding chunk. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + web (google.ai.generativelanguage_v1beta.types.GroundingChunk.Web): + Grounding chunk from the web. + + This field is a member of `oneof`_ ``chunk_type``. + """ + + class Web(proto.Message): + r"""Chunk from the web. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri (str): + URI reference of the chunk. + + This field is a member of `oneof`_ ``_uri``. + title (str): + Title of the chunk. + + This field is a member of `oneof`_ ``_title``. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + title: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + web: Web = proto.Field( + proto.MESSAGE, + number=1, + oneof="chunk_type", + message=Web, + ) + + +class Segment(proto.Message): + r"""Segment of the content. + + Attributes: + part_index (int): + Output only. The index of a Part object + within its parent Content object. + start_index (int): + Output only. Start index in the given Part, + measured in bytes. Offset from the start of the + Part, inclusive, starting at zero. + end_index (int): + Output only. End index in the given Part, + measured in bytes. Offset from the start of the + Part, exclusive, starting at zero. + text (str): + Output only. The text corresponding to the + segment from the response. 
+ """ + + part_index: int = proto.Field( + proto.INT32, + number=1, + ) + start_index: int = proto.Field( + proto.INT32, + number=2, + ) + end_index: int = proto.Field( + proto.INT32, + number=3, + ) + text: str = proto.Field( + proto.STRING, + number=4, + ) + + +class GroundingSupport(proto.Message): + r"""Grounding support. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + segment (google.ai.generativelanguage_v1beta.types.Segment): + Segment of the content this support belongs + to. + + This field is a member of `oneof`_ ``_segment``. + grounding_chunk_indices (MutableSequence[int]): + A list of indices (into 'grounding_chunk') specifying the + citations associated with the claim. For instance [1,3,4] + means that grounding_chunk[1], grounding_chunk[3], + grounding_chunk[4] are the retrieved content attributed to + the claim. + confidence_scores (MutableSequence[float]): + Confidence score of the support references. Ranges from 0 to + 1. 1 is the most confident. This list must have the same + size as the grounding_chunk_indices. + """ + + segment: "Segment" = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message="Segment", + ) + grounding_chunk_indices: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=2, + ) + confidence_scores: MutableSequence[float] = proto.RepeatedField( + proto.FLOAT, + number=3, + ) + + class GenerateAnswerRequest(proto.Message): r"""Request to generate a grounded answer from the ``Model``. @@ -1207,7 +1597,7 @@ class CountTokensResponse(proto.Message): ``prompt`` into. Always non-negative. cached_content_token_count (int): Number of tokens in the cached part of the - prompt, i.e. in the cached content. + prompt (the cached content). 
""" total_tokens: int = proto.Field( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/prediction_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/prediction_service.py new file mode 100644 index 000000000000..b6a659782edf --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/prediction_service.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import struct_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta", + manifest={ + "PredictRequest", + "PredictResponse", + }, +) + + +class PredictRequest(proto.Message): + r"""Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1beta.PredictionService.Predict]. + + Attributes: + model (str): + Required. The name of the model for prediction. Format: + ``name=models/{model}``. + instances (MutableSequence[google.protobuf.struct_pb2.Value]): + Required. The instances that are the input to + the prediction call. + parameters (google.protobuf.struct_pb2.Value): + Optional. The parameters that govern the + prediction call. 
+ """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + instances: MutableSequence[struct_pb2.Value] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=struct_pb2.Value, + ) + parameters: struct_pb2.Value = proto.Field( + proto.MESSAGE, + number=3, + message=struct_pb2.Value, + ) + + +class PredictResponse(proto.Message): + r"""Response message for [PredictionService.Predict]. + + Attributes: + predictions (MutableSequence[google.protobuf.struct_pb2.Value]): + The outputs of the prediction call. + """ + + predictions: MutableSequence[struct_pb2.Value] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=struct_pb2.Value, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py index 113590701d4b..8ede1042a0ac 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py @@ -41,31 +41,32 @@ class HarmCategory(proto.Enum): HARM_CATEGORY_UNSPECIFIED (0): Category is unspecified. HARM_CATEGORY_DEROGATORY (1): - Negative or harmful comments targeting - identity and/or protected attribute. + **PaLM** - Negative or harmful comments targeting identity + and/or protected attribute. HARM_CATEGORY_TOXICITY (2): - Content that is rude, disrespectful, or - profane. + **PaLM** - Content that is rude, disrespectful, or profane. HARM_CATEGORY_VIOLENCE (3): - Describes scenarios depicting violence - against an individual or group, or general - descriptions of gore. + **PaLM** - Describes scenarios depicting violence against an + individual or group, or general descriptions of gore. HARM_CATEGORY_SEXUAL (4): - Contains references to sexual acts or other - lewd content. 
+ **PaLM** - Contains references to sexual acts or other lewd + content. HARM_CATEGORY_MEDICAL (5): - Promotes unchecked medical advice. + **PaLM** - Promotes unchecked medical advice. HARM_CATEGORY_DANGEROUS (6): - Dangerous content that promotes, facilitates, - or encourages harmful acts. + **PaLM** - Dangerous content that promotes, facilitates, or + encourages harmful acts. HARM_CATEGORY_HARASSMENT (7): - Harasment content. + **Gemini** - Harassment content. HARM_CATEGORY_HATE_SPEECH (8): - Hate speech and content. + **Gemini** - Hate speech and content. HARM_CATEGORY_SEXUALLY_EXPLICIT (9): - Sexually explicit content. + **Gemini** - Sexually explicit content. HARM_CATEGORY_DANGEROUS_CONTENT (10): - Dangerous content. + **Gemini** - Dangerous content. + HARM_CATEGORY_CIVIC_INTEGRITY (11): + **Gemini** - Content that may be used to harm civic + integrity. """ HARM_CATEGORY_UNSPECIFIED = 0 HARM_CATEGORY_DEROGATORY = 1 @@ -78,6 +79,7 @@ class HarmCategory(proto.Enum): HARM_CATEGORY_HATE_SPEECH = 8 HARM_CATEGORY_SEXUALLY_EXPLICIT = 9 HARM_CATEGORY_DANGEROUS_CONTENT = 10 + HARM_CATEGORY_CIVIC_INTEGRITY = 11 class ContentFilter(proto.Message): @@ -249,12 +251,15 @@ class HarmBlockThreshold(proto.Enum): be allowed. BLOCK_NONE (4): All content will be allowed. + OFF (5): + Turn off the safety filter. 
""" HARM_BLOCK_THRESHOLD_UNSPECIFIED = 0 BLOCK_LOW_AND_ABOVE = 1 BLOCK_MEDIUM_AND_ABOVE = 2 BLOCK_ONLY_HIGH = 3 BLOCK_NONE = 4 + OFF = 5 category: "HarmCategory" = proto.Field( proto.ENUM, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py index a45283f33632..3be0f7e6b586 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py @@ -118,6 +118,9 @@ class TunedModel(proto.Message): tuning_task (google.ai.generativelanguage_v1beta.types.TuningTask): Required. The tuning task that creates the tuned model. + reader_project_numbers (MutableSequence[int]): + Optional. List of project numbers that have + read access to the tuned model. """ class State(proto.Enum): @@ -196,6 +199,10 @@ class State(proto.Enum): number=10, message="TuningTask", ) + reader_project_numbers: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=14, + ) class TunedModelSource(proto.Message): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py index 1699c98da708..558c8aab67c5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py index 1699c98da708..558c8aab67c5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_async.py new file mode 100644 index 000000000000..851ebfa44e4d --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for Predict +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PredictionService_Predict_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_predict(): + # Create a client + client = generativelanguage_v1beta.PredictionServiceAsyncClient() + + # Initialize request argument(s) + instances = generativelanguage_v1beta.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1beta.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = await client.predict(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_PredictionService_Predict_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_sync.py new file mode 100644 index 000000000000..ade0be26d986 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Predict +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PredictionService_Predict_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_predict(): + # Create a client + client = generativelanguage_v1beta.PredictionServiceClient() + + # Initialize request argument(s) + instances = generativelanguage_v1beta.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1beta.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = client.predict(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_PredictionService_Predict_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json index dcb6ad5e6a9e..d6c3fe4c5051 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json index a6e1502bce78..c418dfa10386 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json @@ -8,7 
+8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.9" + "version": "0.1.0" }, "snippets": [ { @@ -4953,6 +4953,175 @@ ], "title": "generativelanguage_v1beta_generated_permission_service_update_permission_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.PredictionServiceAsyncClient", + "shortName": "PredictionServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PredictionServiceAsyncClient.predict", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.PredictionService.Predict", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PredictionService", + "shortName": "PredictionService" + }, + "shortName": "Predict" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.PredictRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "instances", + "type": "MutableSequence[google.protobuf.struct_pb2.Value]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.PredictResponse", + "shortName": "predict" + }, + "description": "Sample for Predict", + "file": "generativelanguage_v1beta_generated_prediction_service_predict_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_PredictionService_Predict_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_prediction_service_predict_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.PredictionServiceClient", + "shortName": "PredictionServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PredictionServiceClient.predict", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.PredictionService.Predict", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PredictionService", + "shortName": "PredictionService" + }, + "shortName": "Predict" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.PredictRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "instances", + "type": "MutableSequence[google.protobuf.struct_pb2.Value]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.PredictResponse", + "shortName": "predict" + }, + "description": "Sample for Predict", + "file": "generativelanguage_v1beta_generated_prediction_service_predict_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_PredictionService_Predict_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_prediction_service_predict_sync.py" + }, { "canonical": true, "clientMethod": { diff --git 
a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json index d3fc92d09eaa..5b7d0a0509b4 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json index d9c470b9e07c..91de9e353f90 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py index dcb5cdfbb55c..8e69225c75de 100644 --- a/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py +++ b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py @@ -83,6 +83,7 @@ class generativelanguageCallTransformer(cst.CSTTransformer): 'list_models': ('page_size', 'page_token', ), 'list_permissions': ('parent', 'page_size', 
'page_token', ), 'list_tuned_models': ('page_size', 'page_token', 'filter', ), + 'predict': ('model', 'instances', 'parameters', ), 'query_corpus': ('name', 'query', 'metadata_filters', 'results_count', ), 'query_document': ('name', 'query', 'results_count', 'metadata_filters', ), 'stream_generate_content': ('model', 'contents', 'system_instruction', 'tools', 'tool_config', 'safety_settings', 'generation_config', 'cached_content', ), diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py index 46ecbbbaeafa..1cc7b01d0c19 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py @@ -3349,11 +3349,15 @@ def test_create_cached_content_rest(request_type): "enum": ["enum_value1", "enum_value2"], "items": {}, "max_items": 967, + "min_items": 965, "properties": {}, "required": ["required_value1", "required_value2"], }, } ], + "google_search_retrieval": { + "dynamic_retrieval_config": {"mode": 1, "dynamic_threshold": 0.1809} + }, "code_execution": {}, } ], @@ -4097,11 +4101,15 @@ def test_update_cached_content_rest(request_type): "enum": ["enum_value1", "enum_value2"], "items": {}, "max_items": 967, + "min_items": 965, "properties": {}, "required": ["required_value1", "required_value2"], }, } ], + "google_search_retrieval": { + "dynamic_retrieval_config": {"mode": 1, "dynamic_threshold": 0.1809} + }, "code_execution": {}, } ], diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py index c8b4aed2becb..93ee6b5f5eb4 100644 --- 
a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py @@ -2059,6 +2059,7 @@ def test_get_tuned_model(request_type, transport: str = "grpc"): top_p=0.546, top_k=541, state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], base_model="base_model_value", ) response = client.get_tuned_model(request) @@ -2078,6 +2079,7 @@ def test_get_tuned_model(request_type, transport: str = "grpc"): assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] def test_get_tuned_model_empty_call(): @@ -2183,6 +2185,7 @@ async def test_get_tuned_model_empty_call_async(): top_p=0.546, top_k=541, state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], ) ) response = await client.get_tuned_model() @@ -2258,6 +2261,7 @@ async def test_get_tuned_model_async( top_p=0.546, top_k=541, state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], ) ) response = await client.get_tuned_model(request) @@ -2277,6 +2281,7 @@ async def test_get_tuned_model_async( assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] @pytest.mark.asyncio @@ -3346,6 +3351,7 @@ def test_update_tuned_model(request_type, transport: str = "grpc"): top_p=0.546, top_k=541, state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], base_model="base_model_value", ) response = client.update_tuned_model(request) @@ -3365,6 +3371,7 @@ def test_update_tuned_model(request_type, transport: str = "grpc"): assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == 
gag_tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] def test_update_tuned_model_empty_call(): @@ -3476,6 +3483,7 @@ async def test_update_tuned_model_empty_call_async(): top_p=0.546, top_k=541, state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], ) ) response = await client.update_tuned_model() @@ -3553,6 +3561,7 @@ async def test_update_tuned_model_async( top_p=0.546, top_k=541, state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], ) ) response = await client.update_tuned_model(request) @@ -3572,6 +3581,7 @@ async def test_update_tuned_model_async( assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == gag_tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] @pytest.mark.asyncio @@ -4757,6 +4767,7 @@ def test_get_tuned_model_rest(request_type): top_p=0.546, top_k=541, state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], base_model="base_model_value", ) @@ -4780,6 +4791,7 @@ def test_get_tuned_model_rest(request_type): assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] def test_get_tuned_model_rest_use_cached_wrapped_rpc(): @@ -5372,6 +5384,7 @@ def test_create_tuned_model_rest(request_type): "batch_size": 1052, }, }, + "reader_project_numbers": [2341, 2342], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -5784,6 +5797,7 @@ def test_update_tuned_model_rest(request_type): "batch_size": 1052, }, }, + "reader_project_numbers": [2341, 2342], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -5865,6 +5879,7 @@ def get_message_fields(field): top_p=0.546, top_k=541, state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], base_model="base_model_value", ) @@ -5888,6 +5903,7 @@ def get_message_fields(field): assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == gag_tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] def test_update_tuned_model_rest_use_cached_wrapped_rpc(): diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py new file mode 100644 index 000000000000..5532205c7f0a --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py @@ -0,0 +1,2586 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import struct_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ai.generativelanguage_v1beta.services.prediction_service import ( + PredictionServiceAsyncClient, + PredictionServiceClient, + transports, +) +from google.ai.generativelanguage_v1beta.types import prediction_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PredictionServiceClient._get_default_mtls_endpoint(None) is None + assert ( + PredictionServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + PredictionServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + PredictionServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PredictionServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PredictionServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert PredictionServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert PredictionServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert PredictionServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + PredictionServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == 
"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert PredictionServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert PredictionServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert PredictionServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + PredictionServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert PredictionServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert PredictionServiceClient._get_client_cert_source(None, False) is None + assert ( + PredictionServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + PredictionServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + PredictionServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + 
PredictionServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + PredictionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceClient), +) +@mock.patch.object( + PredictionServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = PredictionServiceClient._DEFAULT_UNIVERSE + default_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + PredictionServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + PredictionServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PredictionServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + PredictionServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PredictionServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PredictionServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + PredictionServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + PredictionServiceClient._get_api_endpoint( + None, mock_client_cert_source, 
mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + PredictionServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + PredictionServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + PredictionServiceClient._get_universe_domain(None, None) + == PredictionServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + PredictionServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), + (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PredictionServiceClient, "grpc"), + (PredictionServiceAsyncClient, "grpc_asyncio"), + (PredictionServiceClient, "rest"), + ], +) +def test_prediction_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.PredictionServiceGrpcTransport, "grpc"), + (transports.PredictionServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.PredictionServiceRestTransport, "rest"), + ], +) +def 
test_prediction_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PredictionServiceClient, "grpc"), + (PredictionServiceAsyncClient, "grpc_asyncio"), + (PredictionServiceClient, "rest"), + ], +) +def test_prediction_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_prediction_service_client_get_transport_class(): + transport = PredictionServiceClient.get_transport_class() + available_transports = [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceRestTransport, + ] + assert transport in 
available_transports + + transport = PredictionServiceClient.get_transport_class("grpc") + assert transport == transports.PredictionServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + PredictionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceClient), +) +@mock.patch.object( + PredictionServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceAsyncClient), +) +def test_prediction_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(PredictionServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(PredictionServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + 
always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + PredictionServiceClient, + transports.PredictionServiceGrpcTransport, + "grpc", + "true", + ), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + PredictionServiceClient, + transports.PredictionServiceGrpcTransport, + "grpc", + "false", + ), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + PredictionServiceClient, + transports.PredictionServiceRestTransport, + "rest", + "true", + ), + ( + PredictionServiceClient, + transports.PredictionServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + PredictionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceClient), +) +@mock.patch.object( + PredictionServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_prediction_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [PredictionServiceClient, PredictionServiceAsyncClient] +) +@mock.patch.object( + PredictionServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PredictionServiceClient), +) +@mock.patch.object( + PredictionServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PredictionServiceAsyncClient), +) +def test_prediction_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [PredictionServiceClient, PredictionServiceAsyncClient] +) +@mock.patch.object( + PredictionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceClient), +) +@mock.patch.object( + PredictionServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceAsyncClient), +) +def test_prediction_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = PredictionServiceClient._DEFAULT_UNIVERSE + default_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), + ], +) +def test_prediction_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + PredictionServiceClient, + transports.PredictionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + PredictionServiceClient, + transports.PredictionServiceRestTransport, + "rest", + None, + ), + ], +) +def test_prediction_service_client_client_options_credentials_file( + 
client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_prediction_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1beta.services.prediction_service.transports.PredictionServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = PredictionServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + PredictionServiceClient, + transports.PredictionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_prediction_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + prediction_service.PredictRequest, + dict, + ], +) +def test_predict(request_type, transport: str = "grpc"): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an 
empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = prediction_service.PredictResponse() + response = client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = prediction_service.PredictRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, prediction_service.PredictResponse) + + +def test_predict_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.predict() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == prediction_service.PredictRequest() + + +def test_predict_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = prediction_service.PredictRequest( + model="model_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.predict(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == prediction_service.PredictRequest( + model="model_value", + ) + + +def test_predict_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.predict in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.predict] = mock_rpc + request = {} + client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.predict(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_predict_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() + ) + response = await client.predict() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == prediction_service.PredictRequest() + + +@pytest.mark.asyncio +async def test_predict_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.predict + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.predict + ] = mock_rpc + + request = {} + await client.predict(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.predict(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_predict_async( + transport: str = "grpc_asyncio", request_type=prediction_service.PredictRequest +): + client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() + ) + response = await client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = prediction_service.PredictRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, prediction_service.PredictResponse) + + +@pytest.mark.asyncio +async def test_predict_async_from_dict(): + await test_predict_async(request_type=dict) + + +def test_predict_field_headers(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = prediction_service.PredictRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value = prediction_service.PredictResponse() + client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_predict_field_headers_async(): + client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = prediction_service.PredictRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() + ) + await client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_predict_flattened(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = prediction_service.PredictResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.predict( + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].instances + mock_val = [struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)] + assert arg == mock_val + + +def test_predict_flattened_error(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.predict( + prediction_service.PredictRequest(), + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) + + +@pytest.mark.asyncio +async def test_predict_flattened_async(): + client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = prediction_service.PredictResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.predict( + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].instances + mock_val = [struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_predict_flattened_error_async(): + client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.predict( + prediction_service.PredictRequest(), + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + prediction_service.PredictRequest, + dict, + ], +) +def test_predict_rest(request_type): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = prediction_service.PredictResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = prediction_service.PredictResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.predict(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, prediction_service.PredictResponse) + + +def test_predict_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.predict in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.predict] = mock_rpc + + request = {} + client.predict(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.predict(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_predict_rest_required_fields(request_type=prediction_service.PredictRequest): + transport_class = transports.PredictionServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).predict._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).predict._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = prediction_service.PredictResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = prediction_service.PredictResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.predict(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_predict_rest_unset_required_fields(): + transport = transports.PredictionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.predict._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "instances", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_predict_rest_interceptors(null_interceptor): + transport = transports.PredictionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PredictionServiceRestInterceptor(), + ) + client = PredictionServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PredictionServiceRestInterceptor, "post_predict" + ) as post, mock.patch.object( + transports.PredictionServiceRestInterceptor, "pre_predict" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = prediction_service.PredictRequest.pb( + prediction_service.PredictRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = prediction_service.PredictResponse.to_json( + prediction_service.PredictResponse() + ) + + request = prediction_service.PredictRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = prediction_service.PredictResponse() + + client.predict( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_predict_rest_bad_request( + transport: str = "rest", request_type=prediction_service.PredictRequest +): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.predict(request) + + +def test_predict_rest_flattened(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = prediction_service.PredictResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = prediction_service.PredictResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.predict(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{model=models/*}:predict" % client.transport._host, args[1] + ) + + +def test_predict_rest_flattened_error(transport: str = "rest"): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.predict( + prediction_service.PredictRequest(), + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) + + +def test_predict_rest_error(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PredictionServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PredictionServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PredictionServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PredictionServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PredictionServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PredictionServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + transports.PredictionServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = PredictionServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PredictionServiceGrpcTransport, + ) + + +def test_prediction_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PredictionServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_prediction_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ai.generativelanguage_v1beta.services.prediction_service.transports.PredictionServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.PredictionServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ("predict",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_prediction_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1beta.services.prediction_service.transports.PredictionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PredictionServiceTransport( + credentials_file="credentials.json", + 
quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_prediction_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1beta.services.prediction_service.transports.PredictionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PredictionServiceTransport() + adc.assert_called_once() + + +def test_prediction_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PredictionServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + transports.PredictionServiceRestTransport, + ], +) +def test_prediction_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PredictionServiceGrpcTransport, grpc_helpers), + (transports.PredictionServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_prediction_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_prediction_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.PredictionServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_prediction_service_host_no_port(transport_name): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_prediction_service_host_with_port(transport_name): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else 
"https://generativelanguage.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_prediction_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = PredictionServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = PredictionServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.predict._session + session2 = client2.transport.predict._session + assert session1 != session2 + + +def test_prediction_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PredictionServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_prediction_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PredictionServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_model_path(): + model = "squid" + expected = "models/{model}".format( + model=model, + ) + actual = PredictionServiceClient.model_path(model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "model": "clam", + } + path = PredictionServiceClient.model_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PredictionServiceClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = PredictionServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = PredictionServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PredictionServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = PredictionServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = PredictionServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = PredictionServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = PredictionServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = PredictionServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PredictionServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = PredictionServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = PredictionServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PredictionServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = PredictionServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = PredictionServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PredictionServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.PredictionServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.PredictionServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = PredictionServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (PredictionServiceClient, transports.PredictionServiceGrpcTransport), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From e727cc0e98e37d55882215182f86c2a7d23154ef Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 20:52:35 -0400 Subject: [PATCH 32/59] feat: [google-cloud-gdchardwaremanagement] add an order type field to distinguish a fulfillment request from a sales inquiry (#13090) BEGIN_COMMIT_OVERRIDE feat: add an order type field to distinguish a fulfillment request from a sales inquiry feat: add support to mark comments as read or unread feat: rename zone state signal READY_FOR_SITE_TURNUP to FACTORY_TURNUP_CHECKS_PASSED docs: clarify how access_times are used END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
feat: add support to mark comments as read or unread feat: rename zone state signal READY_FOR_SITE_TURNUP to FACTORY_TURNUP_CHECKS_PASSED docs: clarify how access_times are used PiperOrigin-RevId: 675275984 Source-Link: https://github.com/googleapis/googleapis/commit/fde103ca32090688564bc86d8a430450d59dded7 Source-Link: https://github.com/googleapis/googleapis-gen/commit/a58cb7449fccaa7e9c76bd5a137e79aefa45ede7 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWdkY2hhcmR3YXJlbWFuYWdlbWVudC8uT3dsQm90LnlhbWwiLCJoIjoiYTU4Y2I3NDQ5ZmNjYWE3ZTljNzZiZDVhMTM3ZTc5YWVmYTQ1ZWRlNyJ9 --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../cloud/gdchardwaremanagement/__init__.py | 6 + .../gdchardwaremanagement_v1alpha/__init__.py | 6 + .../gapic_metadata.json | 15 + .../gdc_hardware_management/async_client.py | 120 + .../gdc_hardware_management/client.py | 117 + .../transports/base.py | 14 + .../transports/grpc.py | 31 + .../transports/grpc_asyncio.py | 36 + .../transports/rest.py | 134 + .../types/__init__.py | 6 + .../types/resources.py | 145 +- .../types/service.py | 85 + ...nagement_record_action_on_comment_async.py | 53 + ...anagement_record_action_on_comment_sync.py | 53 + ...e.cloud.gdchardwaremanagement.v1alpha.json | 169 + ..._gdchardwaremanagement_v1alpha_keywords.py | 3 +- .../test_gdc_hardware_management.py | 8956 +++++++++-------- 17 files changed, 5851 insertions(+), 4098 deletions(-) create mode 100644 packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_async.py create mode 100644 packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_sync.py diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/__init__.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/__init__.py index 
8f1261e786ed..6804d0c2e133 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/__init__.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/__init__.py @@ -29,6 +29,7 @@ Comment, Contact, Dimensions, + Entity, Hardware, HardwareConfig, HardwareGroup, @@ -44,6 +45,7 @@ SkuConfig, SkuInstance, Subnet, + SubscriptionConfig, TimePeriod, Zone, ZoneNetworkConfig, @@ -84,6 +86,7 @@ ListZonesRequest, ListZonesResponse, OperationMetadata, + RecordActionOnCommentRequest, SignalZoneStateRequest, SubmitOrderRequest, UpdateHardwareGroupRequest, @@ -114,9 +117,11 @@ "SkuConfig", "SkuInstance", "Subnet", + "SubscriptionConfig", "TimePeriod", "Zone", "ZoneNetworkConfig", + "Entity", "PowerSupply", "CreateCommentRequest", "CreateHardwareGroupRequest", @@ -153,6 +158,7 @@ "ListZonesRequest", "ListZonesResponse", "OperationMetadata", + "RecordActionOnCommentRequest", "SignalZoneStateRequest", "SubmitOrderRequest", "UpdateHardwareGroupRequest", diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/__init__.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/__init__.py index adfdd5d5be4b..4be645a5703c 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/__init__.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/__init__.py @@ -27,6 +27,7 @@ Comment, Contact, Dimensions, + Entity, Hardware, HardwareConfig, HardwareGroup, @@ -42,6 +43,7 @@ SkuConfig, SkuInstance, Subnet, + SubscriptionConfig, TimePeriod, Zone, ZoneNetworkConfig, @@ -82,6 +84,7 @@ ListZonesRequest, ListZonesResponse, OperationMetadata, + RecordActionOnCommentRequest, SignalZoneStateRequest, SubmitOrderRequest, UpdateHardwareGroupRequest, @@ -107,6 +110,7 @@ "DeleteOrderRequest", "DeleteZoneRequest", "Dimensions", + "Entity", "GDCHardwareManagementClient", 
"GetChangeLogEntryRequest", "GetCommentRequest", @@ -143,6 +147,7 @@ "OrganizationContact", "PowerSupply", "RackSpace", + "RecordActionOnCommentRequest", "SignalZoneStateRequest", "Site", "Sku", @@ -150,6 +155,7 @@ "SkuInstance", "SubmitOrderRequest", "Subnet", + "SubscriptionConfig", "TimePeriod", "UpdateHardwareGroupRequest", "UpdateHardwareRequest", diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_metadata.json b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_metadata.json index b5713b6e1b21..3d9830bcaec9 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_metadata.json +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_metadata.json @@ -140,6 +140,11 @@ "list_zones" ] }, + "RecordActionOnComment": { + "methods": [ + "record_action_on_comment" + ] + }, "SignalZoneState": { "methods": [ "signal_zone_state" @@ -310,6 +315,11 @@ "list_zones" ] }, + "RecordActionOnComment": { + "methods": [ + "record_action_on_comment" + ] + }, "SignalZoneState": { "methods": [ "signal_zone_state" @@ -480,6 +490,11 @@ "list_zones" ] }, + "RecordActionOnComment": { + "methods": [ + "record_action_on_comment" + ] + }, "SignalZoneState": { "methods": [ "signal_zone_state" diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/async_client.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/async_client.py index b0755c816a88..7c2f13b80903 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/async_client.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/async_client.py @@ -3237,6 
+3237,126 @@ async def sample_create_comment(): # Done; return the response. return response + async def record_action_on_comment( + self, + request: Optional[Union[service.RecordActionOnCommentRequest, dict]] = None, + *, + name: Optional[str] = None, + action_type: Optional[service.RecordActionOnCommentRequest.ActionType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Comment: + r"""Record Action on a Comment. If the Action specified + in the request is READ, the viewed time in the comment + is set to the time the request was received. If the + comment is already marked as read, subsequent calls will + be ignored. If the Action is UNREAD, the viewed time is + cleared from the comment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_record_action_on_comment(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.RecordActionOnCommentRequest( + name="name_value", + action_type="UNREAD", + ) + + # Make the request + response = await client.record_action_on_comment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.RecordActionOnCommentRequest, dict]]): + The request object. A request to record an action on a + comment. + name (:class:`str`): + Required. The name of the comment. 
Format: + ``projects/{project}/locations/{location}/orders/{order}/comments/{comment}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + action_type (:class:`google.cloud.gdchardwaremanagement_v1alpha.types.RecordActionOnCommentRequest.ActionType`): + Required. The action type of the + recorded action. + + This corresponds to the ``action_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.Comment: + A comment on an order. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, action_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.RecordActionOnCommentRequest): + request = service.RecordActionOnCommentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if action_type is not None: + request.action_type = action_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.record_action_on_comment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_change_log_entries( self, request: Optional[Union[service.ListChangeLogEntriesRequest, dict]] = None, diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/client.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/client.py index 44e392dffb0f..fe8281c62c7d 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/client.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/client.py @@ -3734,6 +3734,123 @@ def sample_create_comment(): # Done; return the response. return response + def record_action_on_comment( + self, + request: Optional[Union[service.RecordActionOnCommentRequest, dict]] = None, + *, + name: Optional[str] = None, + action_type: Optional[service.RecordActionOnCommentRequest.ActionType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Comment: + r"""Record Action on a Comment. If the Action specified + in the request is READ, the viewed time in the comment + is set to the time the request was received. 
If the + comment is already marked as read, subsequent calls will + be ignored. If the Action is UNREAD, the viewed time is + cleared from the comment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_record_action_on_comment(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.RecordActionOnCommentRequest( + name="name_value", + action_type="UNREAD", + ) + + # Make the request + response = client.record_action_on_comment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.RecordActionOnCommentRequest, dict]): + The request object. A request to record an action on a + comment. + name (str): + Required. The name of the comment. Format: + ``projects/{project}/locations/{location}/orders/{order}/comments/{comment}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + action_type (google.cloud.gdchardwaremanagement_v1alpha.types.RecordActionOnCommentRequest.ActionType): + Required. The action type of the + recorded action. + + This corresponds to the ``action_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.Comment: + A comment on an order. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, action_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.RecordActionOnCommentRequest): + request = service.RecordActionOnCommentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if action_type is not None: + request.action_type = action_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.record_action_on_comment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def list_change_log_entries( self, request: Optional[Union[service.ListChangeLogEntriesRequest, dict]] = None, diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/base.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/base.py index 2aacf3ed6d5a..1af0337b882d 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/base.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/base.py @@ -407,6 +407,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.record_action_on_comment: gapic_v1.method.wrap_method( + self.record_action_on_comment, + default_timeout=None, + client_info=client_info, + ), self.list_change_log_entries: gapic_v1.method.wrap_method( self.list_change_log_entries, default_retry=retries.Retry( @@ -753,6 +758,15 @@ def create_comment( ]: raise NotImplementedError() + @property + def record_action_on_comment( + self, + ) -> Callable[ + [service.RecordActionOnCommentRequest], + Union[resources.Comment, Awaitable[resources.Comment]], + ]: + raise NotImplementedError() + @property def list_change_log_entries( self, diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc.py index 50edb419994a..a9126a268ef9 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc.py +++ 
b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc.py @@ -845,6 +845,37 @@ def create_comment( ) return self._stubs["create_comment"] + @property + def record_action_on_comment( + self, + ) -> Callable[[service.RecordActionOnCommentRequest], resources.Comment]: + r"""Return a callable for the record action on comment method over gRPC. + + Record Action on a Comment. If the Action specified + in the request is READ, the viewed time in the comment + is set to the time the request was received. If the + comment is already marked as read, subsequent calls will + be ignored. If the Action is UNREAD, the viewed time is + cleared from the comment. + + Returns: + Callable[[~.RecordActionOnCommentRequest], + ~.Comment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "record_action_on_comment" not in self._stubs: + self._stubs["record_action_on_comment"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/RecordActionOnComment", + request_serializer=service.RecordActionOnCommentRequest.serialize, + response_deserializer=resources.Comment.deserialize, + ) + return self._stubs["record_action_on_comment"] + @property def list_change_log_entries( self, diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc_asyncio.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc_asyncio.py index 1dfcb6395532..12579c465cb7 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc_asyncio.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc_asyncio.py @@ -868,6 +868,37 @@ def create_comment( ) return self._stubs["create_comment"] + @property + def record_action_on_comment( + self, + ) -> Callable[[service.RecordActionOnCommentRequest], Awaitable[resources.Comment]]: + r"""Return a callable for the record action on comment method over gRPC. + + Record Action on a Comment. If the Action specified + in the request is READ, the viewed time in the comment + is set to the time the request was received. If the + comment is already marked as read, subsequent calls will + be ignored. If the Action is UNREAD, the viewed time is + cleared from the comment. + + Returns: + Callable[[~.RecordActionOnCommentRequest], + Awaitable[~.Comment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "record_action_on_comment" not in self._stubs: + self._stubs["record_action_on_comment"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/RecordActionOnComment", + request_serializer=service.RecordActionOnCommentRequest.serialize, + response_deserializer=resources.Comment.deserialize, + ) + return self._stubs["record_action_on_comment"] + @property def list_change_log_entries( self, @@ -1411,6 +1442,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.record_action_on_comment: gapic_v1.method_async.wrap_method( + self.record_action_on_comment, + default_timeout=None, + client_info=client_info, + ), self.list_change_log_entries: gapic_v1.method_async.wrap_method( self.list_change_log_entries, default_retry=retries.AsyncRetry( diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/rest.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/rest.py index b95064ee417e..25c96778ddb7 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/rest.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/rest.py @@ -280,6 +280,14 @@ def post_list_zones(self, response): logging.log(f"Received response: {response}") return response + def pre_record_action_on_comment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_record_action_on_comment(self, response): + logging.log(f"Received response: {response}") + return response + def pre_signal_zone_state(self, request, 
metadata): logging.log(f"Received request: {request}") return request, metadata @@ -892,6 +900,29 @@ def post_list_zones( """ return response + def pre_record_action_on_comment( + self, + request: service.RecordActionOnCommentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.RecordActionOnCommentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for record_action_on_comment + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_record_action_on_comment( + self, response: resources.Comment + ) -> resources.Comment: + """Post-rpc interceptor for record_action_on_comment + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. + """ + return response + def pre_signal_zone_state( self, request: service.SignalZoneStateRequest, @@ -3615,6 +3646,101 @@ def __call__( resp = self._interceptor.post_list_zones(resp) return resp + class _RecordActionOnComment(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("RecordActionOnComment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.RecordActionOnCommentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Comment: + r"""Call the record action on comment method over HTTP. + + Args: + request (~.service.RecordActionOnCommentRequest): + The request object. A request to record an action on a + comment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Comment: + A comment on an order. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/orders/*/comments/*}:recordAction", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_record_action_on_comment( + request, metadata + ) + pb_request = service.RecordActionOnCommentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Comment() + pb_resp = resources.Comment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_record_action_on_comment(resp) + return resp + class _SignalZoneState(GDCHardwareManagementRestStub): def __hash__(self): return hash("SignalZoneState") @@ -4483,6 +4609,14 @@ def list_zones( # In C++ this would require a dynamic_cast return self._ListZones(self._session, self._host, self._interceptor) # type: ignore + @property + def record_action_on_comment( + self, + ) -> Callable[[service.RecordActionOnCommentRequest], resources.Comment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RecordActionOnComment(self._session, self._host, self._interceptor) # type: ignore + @property def signal_zone_state( self, diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/__init__.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/__init__.py index 920359d3bd38..b800bd4e5181 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/__init__.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/__init__.py @@ -18,6 +18,7 @@ Comment, Contact, Dimensions, + Entity, Hardware, HardwareConfig, HardwareGroup, @@ -33,6 +34,7 @@ SkuConfig, SkuInstance, Subnet, + SubscriptionConfig, TimePeriod, Zone, ZoneNetworkConfig, @@ -73,6 +75,7 @@ ListZonesRequest, ListZonesResponse, OperationMetadata, + RecordActionOnCommentRequest, SignalZoneStateRequest, SubmitOrderRequest, UpdateHardwareGroupRequest, @@ -101,9 +104,11 @@ "SkuConfig", "SkuInstance", "Subnet", + "SubscriptionConfig", "TimePeriod", "Zone", 
"ZoneNetworkConfig", + "Entity", "PowerSupply", "CreateCommentRequest", "CreateHardwareGroupRequest", @@ -140,6 +145,7 @@ "ListZonesRequest", "ListZonesResponse", "OperationMetadata", + "RecordActionOnCommentRequest", "SignalZoneStateRequest", "SubmitOrderRequest", "UpdateHardwareGroupRequest", diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/resources.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/resources.py index 54d902b9ae68..9cf90c8096c5 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/resources.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/resources.py @@ -29,6 +29,7 @@ package="google.cloud.gdchardwaremanagement.v1alpha", manifest={ "PowerSupply", + "Entity", "Order", "Site", "HardwareGroup", @@ -50,6 +51,7 @@ "Dimensions", "RackSpace", "HardwareLocation", + "SubscriptionConfig", }, ) @@ -70,6 +72,25 @@ class PowerSupply(proto.Enum): POWER_SUPPLY_DC = 2 +class Entity(proto.Enum): + r"""Entity is used to denote an organization or party. + + Values: + ENTITY_UNSPECIFIED (0): + Entity is unspecified. + GOOGLE (1): + Google. + CUSTOMER (2): + Customer. + VENDOR (3): + Vendor. + """ + ENTITY_UNSPECIFIED = 0 + GOOGLE = 1 + CUSTOMER = 2 + VENDOR = 3 + + class Order(proto.Message): r"""An order for GDC hardware. @@ -140,6 +161,9 @@ class State(proto.Enum): has not been submitted yet. SUBMITTED (2): Order has been submitted to Google. + INFO_COMPLETE (12): + All information required from the customer + for fulfillment of the order is complete. ACCEPTED (3): Order has been accepted by Google. ADDITIONAL_INFO_NEEDED (4): @@ -167,6 +191,7 @@ class State(proto.Enum): STATE_UNSPECIFIED = 0 DRAFT = 1 SUBMITTED = 2 + INFO_COMPLETE = 12 ACCEPTED = 3 ADDITIONAL_INFO_NEEDED = 4 BUILDING = 5 @@ -297,17 +322,30 @@ class Site(proto.Message): Optional. 
The time periods when the site is accessible. If this field is empty, the site is accessible at all times. + + This field is used by Google to schedule the + initial installation as well as any later + hardware maintenance. You may update this at any + time. For example, if the initial installation + is requested during off-hours but maintenance + should be performed during regular business + hours, you should update the access times after + initial installation is complete. notes (str): Optional. Any additional notes for this Site. Please include information about: - - - security or access restrictions - - any regulations affecting the technicians - visiting the site - - any special process or approval required to - move the equipment - - whether a representative will be available - during site visits + - security or access restrictions + - any regulations affecting the technicians + visiting the site + - any special process or approval required to + move the equipment + - whether a representative will be available + during site visits + + customer_site_id (str): + Optional. Customer defined identifier for + this Site. This can be used to identify the site + in the customer's own systems. """ name: str = proto.Field( @@ -355,6 +393,10 @@ class Site(proto.Message): proto.STRING, number=27, ) + customer_site_id: str = proto.Field( + proto.STRING, + number=28, + ) class HardwareGroup(proto.Message): @@ -679,6 +721,14 @@ class Comment(proto.Message): text (str): Required. Text of this comment. The length of text must be <= 1000 characters. + customer_viewed_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp of the first time this + comment was viewed by the customer. If the + comment wasn't viewed then this timestamp will + be unset. + author_entity (google.cloud.gdchardwaremanagement_v1alpha.types.Entity): + Output only. The entity the author belongs + to. 
""" name: str = proto.Field( @@ -703,6 +753,16 @@ class Comment(proto.Message): proto.STRING, number=5, ) + customer_viewed_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + author_entity: "Entity" = proto.Field( + proto.ENUM, + number=7, + enum="Entity", + ) class ChangeLogEntry(proto.Message): @@ -881,6 +941,9 @@ class Zone(proto.Message): globally_unique_id (str): Output only. Globally unique identifier generated for this Edge Zone. + subscription_configs (MutableSequence[google.cloud.gdchardwaremanagement_v1alpha.types.SubscriptionConfig]): + Output only. Subscription configurations for + this zone. """ class State(proto.Enum): @@ -960,6 +1023,11 @@ class State(proto.Enum): proto.STRING, number=12, ) + subscription_configs: MutableSequence["SubscriptionConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=13, + message="SubscriptionConfig", + ) class OrganizationContact(proto.Message): @@ -1502,4 +1570,65 @@ class HardwareLocation(proto.Message): ) +class SubscriptionConfig(proto.Message): + r"""A message to store a subscription configuration. + + Attributes: + subscription_id (str): + Output only. The unique identifier of the + subscription. + billing_id (str): + Output only. The Google Cloud Billing ID that + the subscription is created under. + state (google.cloud.gdchardwaremanagement_v1alpha.types.SubscriptionConfig.SubscriptionState): + Output only. The current state of the + subscription. + """ + + class SubscriptionState(proto.Enum): + r"""Enum to represent the state of the subscription. + + Values: + SUBSCRIPTION_STATE_UNSPECIFIED (0): + State is unspecified. + ACTIVE (1): + Active state means that the subscription has + been created successfully and billing is + happening. + INACTIVE (2): + Inactive means that the subscription has been + created successfully, but billing has not + started yet. + ERROR (3): + The subscription is in an erroneous state. 
+ FAILED_TO_RETRIEVE (4): + The subscription state failed to be + retrieved. This may be a transient issue. The + user should retry the request. + COMPLETED (5): + The subscription has been completed, because + it has reached the end date. + """ + SUBSCRIPTION_STATE_UNSPECIFIED = 0 + ACTIVE = 1 + INACTIVE = 2 + ERROR = 3 + FAILED_TO_RETRIEVE = 4 + COMPLETED = 5 + + subscription_id: str = proto.Field( + proto.STRING, + number=1, + ) + billing_id: str = proto.Field( + proto.STRING, + number=2, + ) + state: SubscriptionState = proto.Field( + proto.ENUM, + number=3, + enum=SubscriptionState, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/service.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/service.py index 9821128023c7..88c0224f1a48 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/service.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/service.py @@ -54,6 +54,7 @@ "ListCommentsResponse", "GetCommentRequest", "CreateCommentRequest", + "RecordActionOnCommentRequest", "ListChangeLogEntriesRequest", "ListChangeLogEntriesResponse", "GetChangeLogEntryRequest", @@ -280,8 +281,44 @@ class SubmitOrderRequest(proto.Message): request_id (str): Optional. An optional unique identifier for this request. See `AIP-155 `__. + type_ (google.cloud.gdchardwaremanagement_v1alpha.types.SubmitOrderRequest.Type): + Optional. Type of this request. If unset, the request type + is assumed to be ``INFO_PENDING``. """ + class Type(proto.Enum): + r"""Valid types of submit order request. + + Values: + TYPE_UNSPECIFIED (0): + Request type is unspecified. This should not + be used. + INFO_PENDING (1): + Use this request type to submit your order + and initiate conversation with Google. 
After + this submission, you will not be able to modify + the number or SKU of your ordered hardware. + Please note that this order will not be ready + for fulfillment yet until you provide more + information, such as zone network configuration, + hardware physical and installation information, + etc. + If you are submitting an order for a SKU type of + RACK, please use this request type, as + additional information will be required outside + of the API. + INFO_COMPLETE (2): + Use this request type if and when you are ready to submit + your order for fulfillment. In addition to the information + required for ``INFO_PENDING``, the order must contain all + required information, such as zone network configuration, + hardware physical and installation information, etc. Further + changes to any order information will no longer be allowed. + """ + TYPE_UNSPECIFIED = 0 + INFO_PENDING = 1 + INFO_COMPLETE = 2 + name: str = proto.Field( proto.STRING, number=1, @@ -290,6 +327,11 @@ class SubmitOrderRequest(proto.Message): proto.STRING, number=2, ) + type_: Type = proto.Field( + proto.ENUM, + number=3, + enum=Type, + ) class ListSitesRequest(proto.Message): @@ -963,6 +1005,44 @@ class CreateCommentRequest(proto.Message): ) +class RecordActionOnCommentRequest(proto.Message): + r"""A request to record an action on a comment. + + Attributes: + name (str): + Required. The name of the comment. Format: + ``projects/{project}/locations/{location}/orders/{order}/comments/{comment}`` + action_type (google.cloud.gdchardwaremanagement_v1alpha.types.RecordActionOnCommentRequest.ActionType): + Required. The action type of the recorded + action. + """ + + class ActionType(proto.Enum): + r"""Valid action types of Comment. + + Values: + ACTION_TYPE_UNSPECIFIED (0): + Action is unspecified. + READ (1): + Mark comment as read. + UNREAD (2): + Mark comment as unread. 
+ """ + ACTION_TYPE_UNSPECIFIED = 0 + READ = 1 + UNREAD = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + action_type: ActionType = proto.Field( + proto.ENUM, + number=2, + enum=ActionType, + ) + + class ListChangeLogEntriesRequest(proto.Message): r"""A request to list change log entries. @@ -1354,12 +1434,17 @@ class StateSignal(proto.Enum): Values: STATE_SIGNAL_UNSPECIFIED (0): State signal of the zone is unspecified. + FACTORY_TURNUP_CHECKS_PASSED (1): + The Zone is ready for site turnup. READY_FOR_SITE_TURNUP (1): The Zone is ready for site turnup. + Deprecated, but not deleted. FACTORY_TURNUP_CHECKS_FAILED (2): The Zone failed in factory turnup checks. """ + _pb_options = {"allow_alias": True} STATE_SIGNAL_UNSPECIFIED = 0 + FACTORY_TURNUP_CHECKS_PASSED = 1 READY_FOR_SITE_TURNUP = 1 FACTORY_TURNUP_CHECKS_FAILED = 2 diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_async.py new file mode 100644 index 000000000000..64fa35ff6d8a --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RecordActionOnComment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_RecordActionOnComment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_record_action_on_comment(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.RecordActionOnCommentRequest( + name="name_value", + action_type="UNREAD", + ) + + # Make the request + response = await client.record_action_on_comment(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_RecordActionOnComment_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_sync.py new file mode 100644 index 000000000000..6658459e3cda --- /dev/null +++ 
b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RecordActionOnComment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_RecordActionOnComment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_record_action_on_comment(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.RecordActionOnCommentRequest( + name="name_value", + action_type="UNREAD", + ) + + # Make the request + response = client.record_action_on_comment(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_RecordActionOnComment_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json index 9716952b0d31..588b33c7fb6e 100644 --- a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json @@ -4293,6 +4293,175 @@ ], "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_zones_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.record_action_on_comment", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.RecordActionOnComment", + "service": { + "fullName": 
"google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "RecordActionOnComment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.RecordActionOnCommentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "action_type", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.RecordActionOnCommentRequest.ActionType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.Comment", + "shortName": "record_action_on_comment" + }, + "description": "Sample for RecordActionOnComment", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_RecordActionOnComment_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.record_action_on_comment", + "method": { + "fullName": 
"google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.RecordActionOnComment", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "RecordActionOnComment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.RecordActionOnCommentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "action_type", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.RecordActionOnCommentRequest.ActionType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.Comment", + "shortName": "record_action_on_comment" + }, + "description": "Sample for RecordActionOnComment", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_RecordActionOnComment_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-gdchardwaremanagement/scripts/fixup_gdchardwaremanagement_v1alpha_keywords.py 
b/packages/google-cloud-gdchardwaremanagement/scripts/fixup_gdchardwaremanagement_v1alpha_keywords.py index 0a8496e2ccc3..c10e8cef3d54 100644 --- a/packages/google-cloud-gdchardwaremanagement/scripts/fixup_gdchardwaremanagement_v1alpha_keywords.py +++ b/packages/google-cloud-gdchardwaremanagement/scripts/fixup_gdchardwaremanagement_v1alpha_keywords.py @@ -65,8 +65,9 @@ class gdchardwaremanagementCallTransformer(cst.CSTTransformer): 'list_sites': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_skus': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_zones': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'record_action_on_comment': ('name', 'action_type', ), 'signal_zone_state': ('name', 'state_signal', 'request_id', ), - 'submit_order': ('name', 'request_id', ), + 'submit_order': ('name', 'request_id', 'type_', ), 'update_hardware': ('update_mask', 'hardware', 'request_id', ), 'update_hardware_group': ('update_mask', 'hardware_group', 'request_id', ), 'update_order': ('update_mask', 'order', 'request_id', ), diff --git a/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/test_gdc_hardware_management.py b/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/test_gdc_hardware_management.py index 72d9a360aa19..dc7856c427c9 100644 --- a/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/test_gdc_hardware_management.py +++ b/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/test_gdc_hardware_management.py @@ -4275,6 +4275,7 @@ def test_get_site(request_type, transport: str = "grpc"): description="description_value", google_maps_pin_uri="google_maps_pin_uri_value", notes="notes_value", + customer_site_id="customer_site_id_value", ) response = client.get_site(request) @@ -4291,6 +4292,7 @@ def test_get_site(request_type, transport: str = "grpc"): assert 
response.description == "description_value" assert response.google_maps_pin_uri == "google_maps_pin_uri_value" assert response.notes == "notes_value" + assert response.customer_site_id == "customer_site_id_value" def test_get_site_empty_call(): @@ -4394,6 +4396,7 @@ async def test_get_site_empty_call_async(): description="description_value", google_maps_pin_uri="google_maps_pin_uri_value", notes="notes_value", + customer_site_id="customer_site_id_value", ) ) response = await client.get_site() @@ -4465,6 +4468,7 @@ async def test_get_site_async( description="description_value", google_maps_pin_uri="google_maps_pin_uri_value", notes="notes_value", + customer_site_id="customer_site_id_value", ) ) response = await client.get_site(request) @@ -4482,6 +4486,7 @@ async def test_get_site_async( assert response.description == "description_value" assert response.google_maps_pin_uri == "google_maps_pin_uri_value" assert response.notes == "notes_value" + assert response.customer_site_id == "customer_site_id_value" @pytest.mark.asyncio @@ -10302,6 +10307,7 @@ def test_get_comment(request_type, transport: str = "grpc"): name="name_value", author="author_value", text="text_value", + author_entity=resources.Entity.GOOGLE, ) response = client.get_comment(request) @@ -10316,6 +10322,7 @@ def test_get_comment(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.author == "author_value" assert response.text == "text_value" + assert response.author_entity == resources.Entity.GOOGLE def test_get_comment_empty_call(): @@ -10417,6 +10424,7 @@ async def test_get_comment_empty_call_async(): name="name_value", author="author_value", text="text_value", + author_entity=resources.Entity.GOOGLE, ) ) response = await client.get_comment() @@ -10488,6 +10496,7 @@ async def test_get_comment_async( name="name_value", author="author_value", text="text_value", + author_entity=resources.Entity.GOOGLE, ) ) response = await client.get_comment(request) @@ -10503,6 
+10512,7 @@ async def test_get_comment_async( assert response.name == "name_value" assert response.author == "author_value" assert response.text == "text_value" + assert response.author_entity == resources.Entity.GOOGLE @pytest.mark.asyncio @@ -11043,11 +11053,11 @@ async def test_create_comment_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - service.ListChangeLogEntriesRequest, + service.RecordActionOnCommentRequest, dict, ], ) -def test_list_change_log_entries(request_type, transport: str = "grpc"): +def test_record_action_on_comment(request_type, transport: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11059,28 +11069,32 @@ def test_list_change_log_entries(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_change_log_entries), "__call__" + type(client.transport.record_action_on_comment), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = service.ListChangeLogEntriesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + call.return_value = resources.Comment( + name="name_value", + author="author_value", + text="text_value", + author_entity=resources.Entity.GOOGLE, ) - response = client.list_change_log_entries(request) + response = client.record_action_on_comment(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.ListChangeLogEntriesRequest() + request = service.RecordActionOnCommentRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListChangeLogEntriesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.Comment) + assert response.name == "name_value" + assert response.author == "author_value" + assert response.text == "text_value" + assert response.author_entity == resources.Entity.GOOGLE -def test_list_change_log_entries_empty_call(): +def test_record_action_on_comment_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementClient( @@ -11090,18 +11104,18 @@ def test_list_change_log_entries_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_change_log_entries), "__call__" + type(client.transport.record_action_on_comment), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_change_log_entries() + client.record_action_on_comment() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListChangeLogEntriesRequest() + assert args[0] == service.RecordActionOnCommentRequest() -def test_list_change_log_entries_non_empty_request_with_auto_populated_field(): +def test_record_action_on_comment_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = GDCHardwareManagementClient( @@ -11112,32 +11126,26 @@ def test_list_change_log_entries_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = service.ListChangeLogEntriesRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + request = service.RecordActionOnCommentRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_change_log_entries), "__call__" + type(client.transport.record_action_on_comment), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_change_log_entries(request=request) + client.record_action_on_comment(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListChangeLogEntriesRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + assert args[0] == service.RecordActionOnCommentRequest( + name="name_value", ) -def test_list_change_log_entries_use_cached_wrapped_rpc(): +def test_record_action_on_comment_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11152,7 +11160,7 @@ def test_list_change_log_entries_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_change_log_entries + client._transport.record_action_on_comment in client._transport._wrapped_methods ) @@ -11162,15 +11170,15 @@ def test_list_change_log_entries_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_change_log_entries + client._transport.record_action_on_comment ] = mock_rpc request = {} - client.list_change_log_entries(request) + client.record_action_on_comment(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_change_log_entries(request) + client.record_action_on_comment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11178,7 +11186,7 @@ def test_list_change_log_entries_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_change_log_entries_empty_call_async(): +async def test_record_action_on_comment_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementAsyncClient( @@ -11188,23 +11196,25 @@ async def test_list_change_log_entries_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_change_log_entries), "__call__" + type(client.transport.record_action_on_comment), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListChangeLogEntriesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + resources.Comment( + name="name_value", + author="author_value", + text="text_value", + author_entity=resources.Entity.GOOGLE, ) ) - response = await client.list_change_log_entries() + response = await client.record_action_on_comment() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListChangeLogEntriesRequest() + assert args[0] == service.RecordActionOnCommentRequest() @pytest.mark.asyncio -async def test_list_change_log_entries_async_use_cached_wrapped_rpc( +async def test_record_action_on_comment_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11221,7 +11231,7 @@ async def test_list_change_log_entries_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - 
client._client._transport.list_change_log_entries + client._client._transport.record_action_on_comment in client._client._transport._wrapped_methods ) @@ -11229,16 +11239,16 @@ async def test_list_change_log_entries_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_change_log_entries + client._client._transport.record_action_on_comment ] = mock_rpc request = {} - await client.list_change_log_entries(request) + await client.record_action_on_comment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_change_log_entries(request) + await client.record_action_on_comment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11246,8 +11256,8 @@ async def test_list_change_log_entries_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_change_log_entries_async( - transport: str = "grpc_asyncio", request_type=service.ListChangeLogEntriesRequest +async def test_record_action_on_comment_async( + transport: str = "grpc_asyncio", request_type=service.RecordActionOnCommentRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11260,51 +11270,55 @@ async def test_list_change_log_entries_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_change_log_entries), "__call__" + type(client.transport.record_action_on_comment), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListChangeLogEntriesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + resources.Comment( + name="name_value", + author="author_value", + text="text_value", + author_entity=resources.Entity.GOOGLE, ) ) - response = await client.list_change_log_entries(request) + response = await client.record_action_on_comment(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.ListChangeLogEntriesRequest() + request = service.RecordActionOnCommentRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListChangeLogEntriesAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.Comment) + assert response.name == "name_value" + assert response.author == "author_value" + assert response.text == "text_value" + assert response.author_entity == resources.Entity.GOOGLE @pytest.mark.asyncio -async def test_list_change_log_entries_async_from_dict(): - await test_list_change_log_entries_async(request_type=dict) +async def test_record_action_on_comment_async_from_dict(): + await test_record_action_on_comment_async(request_type=dict) -def test_list_change_log_entries_field_headers(): +def test_record_action_on_comment_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListChangeLogEntriesRequest() + request = service.RecordActionOnCommentRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_change_log_entries), "__call__" + type(client.transport.record_action_on_comment), "__call__" ) as call: - call.return_value = service.ListChangeLogEntriesResponse() - client.list_change_log_entries(request) + call.return_value = resources.Comment() + client.record_action_on_comment(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11315,30 +11329,28 @@ def test_list_change_log_entries_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_change_log_entries_field_headers_async(): +async def test_record_action_on_comment_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListChangeLogEntriesRequest() + request = service.RecordActionOnCommentRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_change_log_entries), "__call__" + type(client.transport.record_action_on_comment), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListChangeLogEntriesResponse() - ) - await client.list_change_log_entries(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Comment()) + await client.record_action_on_comment(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -11349,37 +11361,41 @@ async def test_list_change_log_entries_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_change_log_entries_flattened(): +def test_record_action_on_comment_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_change_log_entries), "__call__" + type(client.transport.record_action_on_comment), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = service.ListChangeLogEntriesResponse() + call.return_value = resources.Comment() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_change_log_entries( - parent="parent_value", + client.record_action_on_comment( + name="name_value", + action_type=service.RecordActionOnCommentRequest.ActionType.READ, ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].action_type + mock_val = service.RecordActionOnCommentRequest.ActionType.READ assert arg == mock_val -def test_list_change_log_entries_flattened_error(): +def test_record_action_on_comment_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11387,45 +11403,48 @@ def test_list_change_log_entries_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_change_log_entries( - service.ListChangeLogEntriesRequest(), - parent="parent_value", + client.record_action_on_comment( + service.RecordActionOnCommentRequest(), + name="name_value", + action_type=service.RecordActionOnCommentRequest.ActionType.READ, ) @pytest.mark.asyncio -async def test_list_change_log_entries_flattened_async(): +async def test_record_action_on_comment_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_change_log_entries), "__call__" + type(client.transport.record_action_on_comment), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = service.ListChangeLogEntriesResponse() + call.return_value = resources.Comment() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListChangeLogEntriesResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Comment()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_change_log_entries( - parent="parent_value", + response = await client.record_action_on_comment( + name="name_value", + action_type=service.RecordActionOnCommentRequest.ActionType.READ, ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].action_type + mock_val = service.RecordActionOnCommentRequest.ActionType.READ assert arg == mock_val @pytest.mark.asyncio -async def test_list_change_log_entries_flattened_error_async(): +async def test_record_action_on_comment_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11433,276 +11452,75 @@ async def test_list_change_log_entries_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_change_log_entries( - service.ListChangeLogEntriesRequest(), - parent="parent_value", + await client.record_action_on_comment( + service.RecordActionOnCommentRequest(), + name="name_value", + action_type=service.RecordActionOnCommentRequest.ActionType.READ, ) -def test_list_change_log_entries_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + service.ListChangeLogEntriesRequest, + dict, + ], +) +def test_list_change_log_entries(request_type, transport: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_change_log_entries), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - ], - next_page_token="abc", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[], - next_page_token="def", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - ], - next_page_token="ghi", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. + call.return_value = service.ListChangeLogEntriesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - pager = client.list_change_log_entries(request={}, retry=retry, timeout=timeout) + response = client.list_change_log_entries(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListChangeLogEntriesRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.ChangeLogEntry) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListChangeLogEntriesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_list_change_log_entries_pages(transport_name: str = "grpc"): +def test_list_change_log_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_change_log_entries), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - ], - next_page_token="abc", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[], - next_page_token="def", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - ], - next_page_token="ghi", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - pages = list(client.list_change_log_entries(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.list_change_log_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListChangeLogEntriesRequest() -@pytest.mark.asyncio -async def test_list_change_log_entries_async_pager(): - client = GDCHardwareManagementAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_change_log_entries), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - ], - next_page_token="abc", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[], - next_page_token="def", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - ], - next_page_token="ghi", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_change_log_entries( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.ChangeLogEntry) for i in responses) - - -@pytest.mark.asyncio -async def test_list_change_log_entries_async_pages(): - client = GDCHardwareManagementAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_change_log_entries), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - ], - next_page_token="abc", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[], - next_page_token="def", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - ], - next_page_token="ghi", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_change_log_entries(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - service.GetChangeLogEntryRequest, - dict, - ], -) -def test_get_change_log_entry(request_type, transport: str = "grpc"): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_change_log_entry), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = resources.ChangeLogEntry( - name="name_value", - log="log_value", - ) - response = client.get_change_log_entry(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetChangeLogEntryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.ChangeLogEntry) - assert response.name == "name_value" - assert response.log == "log_value" - - -def test_get_change_log_entry_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_change_log_entry), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_change_log_entry() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetChangeLogEntryRequest() - - -def test_get_change_log_entry_non_empty_request_with_auto_populated_field(): +def test_list_change_log_entries_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = GDCHardwareManagementClient( @@ -11713,26 +11531,32 @@ def test_get_change_log_entry_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.GetChangeLogEntryRequest( - name="name_value", + request = service.ListChangeLogEntriesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_change_log_entry), "__call__" + type(client.transport.list_change_log_entries), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_change_log_entry(request=request) + client.list_change_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GetChangeLogEntryRequest( - name="name_value", + assert args[0] == service.ListChangeLogEntriesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) -def test_get_change_log_entry_use_cached_wrapped_rpc(): +def test_list_change_log_entries_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11747,7 +11571,8 @@ def test_get_change_log_entry_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_change_log_entry in client._transport._wrapped_methods + client._transport.list_change_log_entries + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -11756,15 +11581,15 @@ def test_get_change_log_entry_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_change_log_entry + client._transport.list_change_log_entries ] = mock_rpc request = {} - client.get_change_log_entry(request) + client.list_change_log_entries(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_change_log_entry(request) + client.list_change_log_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11772,7 +11597,7 @@ def test_get_change_log_entry_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_change_log_entry_empty_call_async(): +async def test_list_change_log_entries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementAsyncClient( @@ -11782,23 +11607,23 @@ async def test_get_change_log_entry_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_change_log_entry), "__call__" + type(client.transport.list_change_log_entries), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.ChangeLogEntry( - name="name_value", - log="log_value", + service.ListChangeLogEntriesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_change_log_entry() + response = await client.list_change_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GetChangeLogEntryRequest() + assert args[0] == service.ListChangeLogEntriesRequest() @pytest.mark.asyncio -async def test_get_change_log_entry_async_use_cached_wrapped_rpc( +async def test_list_change_log_entries_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11815,7 +11640,7 @@ async def test_get_change_log_entry_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_change_log_entry + client._client._transport.list_change_log_entries in 
client._client._transport._wrapped_methods ) @@ -11823,16 +11648,16 @@ async def test_get_change_log_entry_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_change_log_entry + client._client._transport.list_change_log_entries ] = mock_rpc request = {} - await client.get_change_log_entry(request) + await client.list_change_log_entries(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_change_log_entry(request) + await client.list_change_log_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11840,8 +11665,8 @@ async def test_get_change_log_entry_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_change_log_entry_async( - transport: str = "grpc_asyncio", request_type=service.GetChangeLogEntryRequest +async def test_list_change_log_entries_async( + transport: str = "grpc_asyncio", request_type=service.ListChangeLogEntriesRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11854,51 +11679,51 @@ async def test_get_change_log_entry_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_change_log_entry), "__call__" + type(client.transport.list_change_log_entries), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.ChangeLogEntry( - name="name_value", - log="log_value", + service.ListChangeLogEntriesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_change_log_entry(request) + response = await client.list_change_log_entries(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GetChangeLogEntryRequest() + request = service.ListChangeLogEntriesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.ChangeLogEntry) - assert response.name == "name_value" - assert response.log == "log_value" + assert isinstance(response, pagers.ListChangeLogEntriesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_get_change_log_entry_async_from_dict(): - await test_get_change_log_entry_async(request_type=dict) +async def test_list_change_log_entries_async_from_dict(): + await test_list_change_log_entries_async(request_type=dict) -def test_get_change_log_entry_field_headers(): +def test_list_change_log_entries_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetChangeLogEntryRequest() + request = service.ListChangeLogEntriesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_change_log_entry), "__call__" + type(client.transport.list_change_log_entries), "__call__" ) as call: - call.return_value = resources.ChangeLogEntry() - client.get_change_log_entry(request) + call.return_value = service.ListChangeLogEntriesResponse() + client.list_change_log_entries(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -11909,30 +11734,30 @@ def test_get_change_log_entry_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_change_log_entry_field_headers_async(): +async def test_list_change_log_entries_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetChangeLogEntryRequest() + request = service.ListChangeLogEntriesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_change_log_entry), "__call__" + type(client.transport.list_change_log_entries), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.ChangeLogEntry() + service.ListChangeLogEntriesResponse() ) - await client.get_change_log_entry(request) + await client.list_change_log_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11943,37 +11768,37 @@ async def test_get_change_log_entry_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_change_log_entry_flattened(): +def test_list_change_log_entries_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_change_log_entry), "__call__" + type(client.transport.list_change_log_entries), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.ChangeLogEntry() + call.return_value = service.ListChangeLogEntriesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_change_log_entry( - name="name_value", + client.list_change_log_entries( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_change_log_entry_flattened_error(): +def test_list_change_log_entries_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11981,45 +11806,45 @@ def test_get_change_log_entry_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_change_log_entry( - service.GetChangeLogEntryRequest(), - name="name_value", + client.list_change_log_entries( + service.ListChangeLogEntriesRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_change_log_entry_flattened_async(): +async def test_list_change_log_entries_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_change_log_entry), "__call__" + type(client.transport.list_change_log_entries), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.ChangeLogEntry() + call.return_value = service.ListChangeLogEntriesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.ChangeLogEntry() + service.ListChangeLogEntriesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_change_log_entry( - name="name_value", + response = await client.list_change_log_entries( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_change_log_entry_flattened_error_async(): +async def test_list_change_log_entries_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12027,132 +11852,338 @@ async def test_get_change_log_entry_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_change_log_entry( - service.GetChangeLogEntryRequest(), - name="name_value", + await client.list_change_log_entries( + service.ListChangeLogEntriesRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - service.ListSkusRequest, - dict, - ], -) -def test_list_skus(request_type, transport: str = "grpc"): +def test_list_change_log_entries_pager(transport_name: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_skus), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListSkusResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + with mock.patch.object( + type(client.transport.list_change_log_entries), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + next_page_token="abc", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[], + next_page_token="def", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + ], + next_page_token="ghi", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + ), + RuntimeError, ) - response = client.list_skus(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListSkusRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_change_log_entries(request={}, retry=retry, timeout=timeout) - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSkusPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.ChangeLogEntry) for i in results) -def test_list_skus_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_list_change_log_entries_pages(transport_name: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_skus), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object( + type(client.transport.list_change_log_entries), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + next_page_token="abc", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[], + next_page_token="def", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + ], + next_page_token="ghi", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + ), + RuntimeError, ) - client.list_skus() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListSkusRequest() + pages = list(client.list_change_log_entries(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_list_skus_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = GDCHardwareManagementClient( +@pytest.mark.asyncio +async def test_list_change_log_entries_async_pager(): + client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListSkusRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_skus), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object( + type(client.transport.list_change_log_entries), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + next_page_token="abc", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[], + next_page_token="def", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + ], + next_page_token="ghi", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + ), + RuntimeError, ) - client.list_skus(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListSkusRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + async_pager = await client.list_change_log_entries( + request={}, ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + assert len(responses) == 6 + assert all(isinstance(i, resources.ChangeLogEntry) for i in responses) -def test_list_skus_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + +@pytest.mark.asyncio +async def test_list_change_log_entries_async_pages(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_change_log_entries), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + next_page_token="abc", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[], + next_page_token="def", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + ], + next_page_token="ghi", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_change_log_entries(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetChangeLogEntryRequest, + dict, + ], +) +def test_get_change_log_entry(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.ChangeLogEntry( + name="name_value", + log="log_value", + ) + response = client.get_change_log_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetChangeLogEntryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.ChangeLogEntry) + assert response.name == "name_value" + assert response.log == "log_value" + + +def test_get_change_log_entry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_change_log_entry() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetChangeLogEntryRequest() + + +def test_get_change_log_entry_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetChangeLogEntryRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_change_log_entry(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetChangeLogEntryRequest( + name="name_value", + ) + + +def test_get_change_log_entry_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_skus in client._transport._wrapped_methods + assert ( + client._transport.get_change_log_entry in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_skus] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_change_log_entry + ] = mock_rpc request = {} - client.list_skus(request) + client.get_change_log_entry(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_skus(request) + client.get_change_log_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12160,7 +12191,7 @@ def test_list_skus_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_skus_empty_call_async(): +async def test_get_change_log_entry_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementAsyncClient( @@ -12169,22 +12200,26 @@ async def test_list_skus_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListSkusResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + resources.ChangeLogEntry( + name="name_value", + log="log_value", ) ) - response = await client.list_skus() + response = await client.get_change_log_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListSkusRequest() + assert args[0] == service.GetChangeLogEntryRequest() @pytest.mark.asyncio -async def test_list_skus_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_change_log_entry_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -12199,7 +12234,7 @@ async def test_list_skus_async_use_cached_wrapped_rpc(transport: str = "grpc_asy # Ensure method has been cached assert ( - 
client._client._transport.list_skus + client._client._transport.get_change_log_entry in client._client._transport._wrapped_methods ) @@ -12207,16 +12242,16 @@ async def test_list_skus_async_use_cached_wrapped_rpc(transport: str = "grpc_asy mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_skus + client._client._transport.get_change_log_entry ] = mock_rpc request = {} - await client.list_skus(request) + await client.get_change_log_entry(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_skus(request) + await client.get_change_log_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12224,8 +12259,8 @@ async def test_list_skus_async_use_cached_wrapped_rpc(transport: str = "grpc_asy @pytest.mark.asyncio -async def test_list_skus_async( - transport: str = "grpc_asyncio", request_type=service.ListSkusRequest +async def test_get_change_log_entry_async( + transport: str = "grpc_asyncio", request_type=service.GetChangeLogEntryRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12237,48 +12272,52 @@ async def test_list_skus_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListSkusResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + resources.ChangeLogEntry( + name="name_value", + log="log_value", ) ) - response = await client.list_skus(request) + response = await client.get_change_log_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.ListSkusRequest() + request = service.GetChangeLogEntryRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSkusAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.ChangeLogEntry) + assert response.name == "name_value" + assert response.log == "log_value" @pytest.mark.asyncio -async def test_list_skus_async_from_dict(): - await test_list_skus_async(request_type=dict) +async def test_get_change_log_entry_async_from_dict(): + await test_get_change_log_entry_async(request_type=dict) -def test_list_skus_field_headers(): +def test_get_change_log_entry_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListSkusRequest() + request = service.GetChangeLogEntryRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_skus), "__call__") as call: - call.return_value = service.ListSkusResponse() - client.list_skus(request) + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: + call.return_value = resources.ChangeLogEntry() + client.get_change_log_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12289,28 +12328,30 @@ def test_list_skus_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_skus_field_headers_async(): +async def test_get_change_log_entry_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListSkusRequest() + request = service.GetChangeLogEntryRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListSkusResponse() + resources.ChangeLogEntry() ) - await client.list_skus(request) + await client.get_change_log_entry(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -12321,35 +12362,37 @@ async def test_list_skus_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_skus_flattened(): +def test_get_change_log_entry_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_skus), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListSkusResponse() + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.ChangeLogEntry() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_skus( - parent="parent_value", + client.get_change_log_entry( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_skus_flattened_error(): +def test_get_change_log_entry_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12357,43 +12400,45 @@ def test_list_skus_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_skus( - service.ListSkusRequest(), - parent="parent_value", + client.get_change_log_entry( + service.GetChangeLogEntryRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_skus_flattened_async(): +async def test_get_change_log_entry_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = service.ListSkusResponse() + call.return_value = resources.ChangeLogEntry() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListSkusResponse() + resources.ChangeLogEntry() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_skus( - parent="parent_value", + response = await client.get_change_log_entry( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_skus_flattened_error_async(): +async def test_get_change_log_entry_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12401,330 +12446,132 @@ async def test_list_skus_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_skus( - service.ListSkusRequest(), - parent="parent_value", + await client.get_change_log_entry( + service.GetChangeLogEntryRequest(), + name="name_value", ) -def test_list_skus_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + service.ListSkusRequest, + dict, + ], +) +def test_list_skus(request_type, transport: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_skus), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListSkusResponse( - skus=[ - resources.Sku(), - resources.Sku(), - resources.Sku(), - ], - next_page_token="abc", - ), - service.ListSkusResponse( - skus=[], - next_page_token="def", - ), - service.ListSkusResponse( - skus=[ - resources.Sku(), - ], - next_page_token="ghi", - ), - service.ListSkusResponse( - skus=[ - resources.Sku(), - resources.Sku(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. + call.return_value = service.ListSkusResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - pager = client.list_skus(request={}, retry=retry, timeout=timeout) + response = client.list_skus(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListSkusRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Sku) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSkusPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_list_skus_pages(transport_name: str = "grpc"): +def test_list_skus_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_skus), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListSkusResponse( - skus=[ - resources.Sku(), - resources.Sku(), - resources.Sku(), - ], - next_page_token="abc", - ), - service.ListSkusResponse( - skus=[], - next_page_token="def", - ), - service.ListSkusResponse( - skus=[ - resources.Sku(), - ], - next_page_token="ghi", - ), - service.ListSkusResponse( - skus=[ - resources.Sku(), - resources.Sku(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
) - pages = list(client.list_skus(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.list_skus() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSkusRequest() -@pytest.mark.asyncio -async def test_list_skus_async_pager(): - client = GDCHardwareManagementAsyncClient( +def test_list_skus_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListSkusRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_skus), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListSkusResponse( - skus=[ - resources.Sku(), - resources.Sku(), - resources.Sku(), - ], - next_page_token="abc", - ), - service.ListSkusResponse( - skus=[], - next_page_token="def", - ), - service.ListSkusResponse( - skus=[ - resources.Sku(), - ], - next_page_token="ghi", - ), - service.ListSkusResponse( - skus=[ - resources.Sku(), - resources.Sku(), - ], - ), - RuntimeError, + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
) - async_pager = await client.list_skus( - request={}, + client.list_skus(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSkusRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - assert len(responses) == 6 - assert all(isinstance(i, resources.Sku) for i in responses) +def test_list_skus_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) -@pytest.mark.asyncio -async def test_list_skus_async_pages(): - client = GDCHardwareManagementAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_skus), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListSkusResponse( - skus=[ - resources.Sku(), - resources.Sku(), - resources.Sku(), - ], - next_page_token="abc", - ), - service.ListSkusResponse( - skus=[], - next_page_token="def", - ), - service.ListSkusResponse( - skus=[ - resources.Sku(), - ], - next_page_token="ghi", - ), - service.ListSkusResponse( - skus=[ - resources.Sku(), - resources.Sku(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_skus(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - service.GetSkuRequest, - dict, - ], -) -def test_get_sku(request_type, transport: str = "grpc"): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sku), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Sku( - name="name_value", - display_name="display_name_value", - description="description_value", - revision_id="revision_id_value", - is_active=True, - type_=resources.Sku.Type.RACK, - vcpu_count=1094, - ) - response = client.get_sku(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetSkuRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Sku) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.revision_id == "revision_id_value" - assert response.is_active is True - assert response.type_ == resources.Sku.Type.RACK - assert response.vcpu_count == 1094 - - -def test_get_sku_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sku), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_sku() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetSkuRequest() - - -def test_get_sku_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetSkuRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_sku), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_sku(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetSkuRequest( - name="name_value", - ) - - -def test_get_sku_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_sku in client._transport._wrapped_methods + assert client._transport.list_skus in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_sku] = mock_rpc + client._transport._wrapped_methods[client._transport.list_skus] = mock_rpc request = {} - client.get_sku(request) + client.list_skus(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_sku(request) + client.list_skus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12732,7 +12579,7 @@ def test_get_sku_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_sku_empty_call_async(): +async def test_list_skus_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementAsyncClient( @@ -12741,27 +12588,22 @@ async def test_get_sku_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Sku( - name="name_value", - display_name="display_name_value", - description="description_value", - revision_id="revision_id_value", - is_active=True, - type_=resources.Sku.Type.RACK, - vcpu_count=1094, + service.ListSkusResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_sku() + response = await client.list_skus() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GetSkuRequest() + assert args[0] == service.ListSkusRequest() @pytest.mark.asyncio -async def test_get_sku_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_skus_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -12776,7 +12618,7 @@ async def test_get_sku_async_use_cached_wrapped_rpc(transport: str = "grpc_async # 
Ensure method has been cached assert ( - client._client._transport.get_sku + client._client._transport.list_skus in client._client._transport._wrapped_methods ) @@ -12784,16 +12626,16 @@ async def test_get_sku_async_use_cached_wrapped_rpc(transport: str = "grpc_async mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_sku + client._client._transport.list_skus ] = mock_rpc request = {} - await client.get_sku(request) + await client.list_skus(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_sku(request) + await client.list_skus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12801,8 +12643,8 @@ async def test_get_sku_async_use_cached_wrapped_rpc(transport: str = "grpc_async @pytest.mark.asyncio -async def test_get_sku_async( - transport: str = "grpc_asyncio", request_type=service.GetSkuRequest +async def test_list_skus_async( + transport: str = "grpc_asyncio", request_type=service.ListSkusRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12814,58 +12656,48 @@ async def test_get_sku_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Sku( - name="name_value", - display_name="display_name_value", - description="description_value", - revision_id="revision_id_value", - is_active=True, - type_=resources.Sku.Type.RACK, - vcpu_count=1094, + service.ListSkusResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_sku(request) + response = await client.list_skus(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GetSkuRequest() + request = service.ListSkusRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.Sku) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.revision_id == "revision_id_value" - assert response.is_active is True - assert response.type_ == resources.Sku.Type.RACK - assert response.vcpu_count == 1094 + assert isinstance(response, pagers.ListSkusAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_get_sku_async_from_dict(): - await test_get_sku_async(request_type=dict) +async def test_list_skus_async_from_dict(): + await test_list_skus_async(request_type=dict) -def test_get_sku_field_headers(): +def test_list_skus_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetSkuRequest() + request = service.ListSkusRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_sku), "__call__") as call: - call.return_value = resources.Sku() - client.get_sku(request) + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + call.return_value = service.ListSkusResponse() + client.list_skus(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12876,26 +12708,28 @@ def test_get_sku_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_sku_field_headers_async(): +async def test_list_skus_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetSkuRequest() + request = service.ListSkusRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sku), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Sku()) - await client.get_sku(request) + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSkusResponse() + ) + await client.list_skus(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -12906,35 +12740,35 @@ async def test_get_sku_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_sku_flattened(): +def test_list_skus_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.Sku() + call.return_value = service.ListSkusResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_sku( - name="name_value", + client.list_skus( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_sku_flattened_error(): +def test_list_skus_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12942,41 +12776,43 @@ def test_get_sku_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_sku( - service.GetSkuRequest(), - name="name_value", + client.list_skus( + service.ListSkusRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_sku_flattened_async(): +async def test_list_skus_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.Sku() + call.return_value = service.ListSkusResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Sku()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSkusResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_sku( - name="name_value", + response = await client.list_skus( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_sku_flattened_error_async(): +async def test_list_skus_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12984,164 +12820,367 @@ async def test_get_sku_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_sku( - service.GetSkuRequest(), - name="name_value", + await client.list_skus( + service.ListSkusRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - service.ListZonesRequest, - dict, - ], -) -def test_list_zones(request_type, transport: str = "grpc"): +def test_list_skus_pager(transport_name: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_zones), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListZonesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + resources.Sku(), + ], + next_page_token="abc", + ), + service.ListSkusResponse( + skus=[], + next_page_token="def", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + ], + next_page_token="ghi", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + ], + ), + RuntimeError, ) - response = client.list_zones(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListZonesRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_skus(request={}, retry=retry, timeout=timeout) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListZonesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Sku) for i in results) -def test_list_zones_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. + +def test_list_skus_pages(transport_name: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_zones), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + resources.Sku(), + ], + next_page_token="abc", + ), + service.ListSkusResponse( + skus=[], + next_page_token="def", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + ], + next_page_token="ghi", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + ], + ), + RuntimeError, ) - client.list_zones() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListZonesRequest() + pages = list(client.list_skus(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_list_zones_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = GDCHardwareManagementClient( +@pytest.mark.asyncio +async def test_list_skus_async_pager(): + client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListZonesRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_zones), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.list_zones(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListZonesRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", - ) - - -def test_list_zones_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + with mock.patch.object( + type(client.transport.list_skus), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + resources.Sku(), + ], + next_page_token="abc", + ), + service.ListSkusResponse( + skus=[], + next_page_token="def", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + ], + next_page_token="ghi", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + ], + ), + RuntimeError, ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_zones in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + async_pager = await client.list_skus( + request={}, ) - client._transport._wrapped_methods[client._transport.list_zones] = mock_rpc - request = {} - client.list_zones(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_zones(request) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + assert len(responses) == 6 + assert all(isinstance(i, resources.Sku) for i in responses) @pytest.mark.asyncio -async def test_list_zones_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +async def test_list_skus_async_pages(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_zones), "__call__") as call: - # Designate an appropriate return value for the call. + with mock.patch.object( + type(client.transport.list_skus), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + resources.Sku(), + ], + next_page_token="abc", + ), + service.ListSkusResponse( + skus=[], + next_page_token="def", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + ], + next_page_token="ghi", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_skus(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetSkuRequest, + dict, + ], +) +def test_get_sku(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Sku( + name="name_value", + display_name="display_name_value", + description="description_value", + revision_id="revision_id_value", + is_active=True, + type_=resources.Sku.Type.RACK, + vcpu_count=1094, + ) + response = client.get_sku(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetSkuRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Sku) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.revision_id == "revision_id_value" + assert response.is_active is True + assert response.type_ == resources.Sku.Type.RACK + assert response.vcpu_count == 1094 + + +def test_get_sku_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_sku() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetSkuRequest() + + +def test_get_sku_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetSkuRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_sku(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetSkuRequest( + name="name_value", + ) + + +def test_get_sku_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_sku in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_sku] = mock_rpc + request = {} + client.get_sku(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_sku(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_sku_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListZonesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + resources.Sku( + name="name_value", + display_name="display_name_value", + description="description_value", + revision_id="revision_id_value", + is_active=True, + type_=resources.Sku.Type.RACK, + vcpu_count=1094, ) ) - response = await client.list_zones() + response = await client.get_sku() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListZonesRequest() + assert args[0] == service.GetSkuRequest() @pytest.mark.asyncio -async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_sku_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -13156,7 +13195,7 @@ async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.list_zones + client._client._transport.get_sku in client._client._transport._wrapped_methods ) @@ -13164,16 +13203,16 @@ async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_as mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_zones + client._client._transport.get_sku ] = mock_rpc request = {} - await client.list_zones(request) + await client.get_sku(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.list_zones(request) + await client.get_sku(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13181,8 +13220,8 @@ async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_list_zones_async( - transport: str = "grpc_asyncio", request_type=service.ListZonesRequest +async def test_get_sku_async( + transport: str = "grpc_asyncio", request_type=service.GetSkuRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13194,50 +13233,60 @@ async def test_list_zones_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListZonesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + resources.Sku( + name="name_value", + display_name="display_name_value", + description="description_value", + revision_id="revision_id_value", + is_active=True, + type_=resources.Sku.Type.RACK, + vcpu_count=1094, ) ) - response = await client.list_zones(request) + response = await client.get_sku(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.ListZonesRequest() + request = service.GetSkuRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListZonesAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.Sku) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.revision_id == "revision_id_value" + assert response.is_active is True + assert response.type_ == resources.Sku.Type.RACK + assert response.vcpu_count == 1094 @pytest.mark.asyncio -async def test_list_zones_async_from_dict(): - await test_list_zones_async(request_type=dict) +async def test_get_sku_async_from_dict(): + await test_get_sku_async(request_type=dict) -def test_list_zones_field_headers(): +def test_get_sku_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListZonesRequest() + request = service.GetSkuRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_zones), "__call__") as call: - call.return_value = service.ListZonesResponse() - client.list_zones(request) - - # Establish that the underlying gRPC stub method was called. + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + call.return_value = resources.Sku() + client.get_sku(request) + + # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request @@ -13246,28 +13295,26 @@ def test_list_zones_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_zones_field_headers_async(): +async def test_get_sku_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListZonesRequest() + request = service.GetSkuRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_zones), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListZonesResponse() - ) - await client.list_zones(request) + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Sku()) + await client.get_sku(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13278,35 +13325,35 @@ async def test_list_zones_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_zones_flattened(): +def test_get_sku_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = service.ListZonesResponse() + call.return_value = resources.Sku() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_zones( - parent="parent_value", + client.get_sku( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_zones_flattened_error(): +def test_get_sku_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13314,43 +13361,41 @@ def test_list_zones_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_zones( - service.ListZonesRequest(), - parent="parent_value", + client.get_sku( + service.GetSkuRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_zones_flattened_async(): +async def test_get_sku_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListZonesResponse() + call.return_value = resources.Sku() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListZonesResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Sku()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_zones( - parent="parent_value", + response = await client.get_sku( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_zones_flattened_error_async(): +async def test_get_sku_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13358,298 +13403,104 @@ async def test_list_zones_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_zones( - service.ListZonesRequest(), - parent="parent_value", + await client.get_sku( + service.GetSkuRequest(), + name="name_value", ) -def test_list_zones_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + service.ListZonesRequest, + dict, + ], +) +def test_list_zones(request_type, transport: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_zones), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - resources.Zone(), - ], - next_page_token="abc", - ), - service.ListZonesResponse( - zones=[], - next_page_token="def", - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - ], - next_page_token="ghi", - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. + call.return_value = service.ListZonesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - pager = client.list_zones(request={}, retry=retry, timeout=timeout) + response = client.list_zones(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListZonesRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Zone) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListZonesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_list_zones_pages(transport_name: str = "grpc"): +def test_list_zones_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_zones), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - resources.Zone(), - ], - next_page_token="abc", - ), - service.ListZonesResponse( - zones=[], - next_page_token="def", - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - ], - next_page_token="ghi", - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - pages = list(client.list_zones(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.list_zones() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListZonesRequest() -@pytest.mark.asyncio -async def test_list_zones_async_pager(): - client = GDCHardwareManagementAsyncClient( +def test_list_zones_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListZonesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - resources.Zone(), - ], - next_page_token="abc", - ), - service.ListZonesResponse( - zones=[], - next_page_token="def", - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - ], - next_page_token="ghi", - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - ], - ), - RuntimeError, + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - async_pager = await client.list_zones( - request={}, + client.list_zones(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListZonesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - assert len(responses) == 6 - assert all(isinstance(i, resources.Zone) for i in responses) - -@pytest.mark.asyncio -async def test_list_zones_async_pages(): - client = GDCHardwareManagementAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - resources.Zone(), - ], - next_page_token="abc", - ), - service.ListZonesResponse( - zones=[], - next_page_token="def", - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - ], - next_page_token="ghi", - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_zones(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - service.GetZoneRequest, - dict, - ], -) -def test_get_zone(request_type, transport: str = "grpc"): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_zone), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Zone( - name="name_value", - display_name="display_name_value", - state=resources.Zone.State.ADDITIONAL_INFO_NEEDED, - ciq_uri="ciq_uri_value", - globally_unique_id="globally_unique_id_value", - ) - response = client.get_zone(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetZoneRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Zone) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.state == resources.Zone.State.ADDITIONAL_INFO_NEEDED - assert response.ciq_uri == "ciq_uri_value" - assert response.globally_unique_id == "globally_unique_id_value" - - -def test_get_zone_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_zone), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_zone() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetZoneRequest() - - -def test_get_zone_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetZoneRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_zone), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_zone(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetZoneRequest( - name="name_value", - ) - - -def test_get_zone_use_cached_wrapped_rpc(): +def test_list_zones_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13663,21 +13514,21 @@ def test_get_zone_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_zone in client._transport._wrapped_methods + assert client._transport.list_zones in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_zone] = mock_rpc + client._transport._wrapped_methods[client._transport.list_zones] = mock_rpc request = {} - client.get_zone(request) + client.list_zones(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_zone(request) + client.list_zones(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13685,7 +13536,7 @@ def test_get_zone_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_zone_empty_call_async(): +async def test_list_zones_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = GDCHardwareManagementAsyncClient( @@ -13694,25 +13545,22 @@ async def test_get_zone_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Zone( - name="name_value", - display_name="display_name_value", - state=resources.Zone.State.ADDITIONAL_INFO_NEEDED, - ciq_uri="ciq_uri_value", - globally_unique_id="globally_unique_id_value", + service.ListZonesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_zone() + response = await client.list_zones() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GetZoneRequest() + assert args[0] == service.ListZonesRequest() @pytest.mark.asyncio -async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -13727,7 +13575,7 @@ async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn # Ensure method has been cached assert ( - client._client._transport.get_zone + client._client._transport.list_zones in client._client._transport._wrapped_methods ) @@ -13735,16 +13583,16 @@ async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_zone + client._client._transport.list_zones ] = mock_rpc request = {} - await 
client.get_zone(request) + await client.list_zones(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_zone(request) + await client.list_zones(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13752,8 +13600,8 @@ async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn @pytest.mark.asyncio -async def test_get_zone_async( - transport: str = "grpc_asyncio", request_type=service.GetZoneRequest +async def test_list_zones_async( + transport: str = "grpc_asyncio", request_type=service.ListZonesRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13765,54 +13613,48 @@ async def test_get_zone_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Zone( - name="name_value", - display_name="display_name_value", - state=resources.Zone.State.ADDITIONAL_INFO_NEEDED, - ciq_uri="ciq_uri_value", - globally_unique_id="globally_unique_id_value", + service.ListZonesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_zone(request) + response = await client.list_zones(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GetZoneRequest() + request = service.ListZonesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Zone) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.state == resources.Zone.State.ADDITIONAL_INFO_NEEDED - assert response.ciq_uri == "ciq_uri_value" - assert response.globally_unique_id == "globally_unique_id_value" + assert isinstance(response, pagers.ListZonesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_get_zone_async_from_dict(): - await test_get_zone_async(request_type=dict) +async def test_list_zones_async_from_dict(): + await test_list_zones_async(request_type=dict) -def test_get_zone_field_headers(): +def test_list_zones_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetZoneRequest() + request = service.ListZonesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_zone), "__call__") as call: - call.return_value = resources.Zone() - client.get_zone(request) + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + call.return_value = service.ListZonesResponse() + client.list_zones(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -13823,26 +13665,28 @@ def test_get_zone_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_zone_field_headers_async(): +async def test_list_zones_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetZoneRequest() + request = service.ListZonesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_zone), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone()) - await client.get_zone(request) + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListZonesResponse() + ) + await client.list_zones(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13853,35 +13697,35 @@ async def test_get_zone_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_zone_flattened(): +def test_list_zones_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.Zone() + call.return_value = service.ListZonesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_zone( - name="name_value", + client.list_zones( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_zone_flattened_error(): +def test_list_zones_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13889,41 +13733,43 @@ def test_get_zone_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_zone( - service.GetZoneRequest(), - name="name_value", + client.list_zones( + service.ListZonesRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_zone_flattened_async(): +async def test_list_zones_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.Zone() + call.return_value = service.ListZonesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListZonesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_zone( - name="name_value", + response = await client.list_zones( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_zone_flattened_error_async(): +async def test_list_zones_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13931,20 +13777,214 @@ async def test_get_zone_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_zone( - service.GetZoneRequest(), - name="name_value", + await client.list_zones( + service.ListZonesRequest(), + parent="parent_value", + ) + + +def test_list_zones_pager(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token="abc", + ), + service.ListZonesResponse( + zones=[], + next_page_token="def", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token="ghi", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_zones(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Zone) for i in results) + + +def test_list_zones_pages(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token="abc", + ), + service.ListZonesResponse( + zones=[], + next_page_token="def", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token="ghi", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, + ) + pages = list(client.list_zones(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_zones_async_pager(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zones), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token="abc", + ), + service.ListZonesResponse( + zones=[], + next_page_token="def", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token="ghi", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_zones( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Zone) for i in responses) + + +@pytest.mark.asyncio +async def test_list_zones_async_pages(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_zones), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token="abc", + ), + service.ListZonesResponse( + zones=[], + next_page_token="def", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token="ghi", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_zones(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - service.CreateZoneRequest, + service.GetZoneRequest, dict, ], ) -def test_create_zone(request_type, transport: str = "grpc"): +def test_get_zone(request_type, transport: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13955,22 +13995,33 @@ def test_create_zone(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_zone(request) + call.return_value = resources.Zone( + name="name_value", + display_name="display_name_value", + state=resources.Zone.State.ADDITIONAL_INFO_NEEDED, + ciq_uri="ciq_uri_value", + globally_unique_id="globally_unique_id_value", + ) + response = client.get_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.CreateZoneRequest() + request = service.GetZoneRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, resources.Zone) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == resources.Zone.State.ADDITIONAL_INFO_NEEDED + assert response.ciq_uri == "ciq_uri_value" + assert response.globally_unique_id == "globally_unique_id_value" -def test_create_zone_empty_call(): +def test_get_zone_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementClient( @@ -13979,17 +14030,17 @@ def test_create_zone_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_zone() + client.get_zone() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateZoneRequest() + assert args[0] == service.GetZoneRequest() -def test_create_zone_non_empty_request_with_auto_populated_field(): +def test_get_zone_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = GDCHardwareManagementClient( @@ -14000,26 +14051,24 @@ def test_create_zone_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.CreateZoneRequest( - parent="parent_value", - zone_id="zone_id_value", + request = service.GetZoneRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_zone(request=request) + client.get_zone(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateZoneRequest( - parent="parent_value", - zone_id="zone_id_value", + assert args[0] == service.GetZoneRequest( + name="name_value", ) -def test_create_zone_use_cached_wrapped_rpc(): +def test_get_zone_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14033,26 +14082,21 @@ def test_create_zone_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_zone in client._transport._wrapped_methods + assert client._transport.get_zone in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_zone] = mock_rpc + client._transport._wrapped_methods[client._transport.get_zone] = mock_rpc request = {} - client.create_zone(request) + client.get_zone(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_zone(request) + client.get_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14060,7 +14104,7 @@ def test_create_zone_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_zone_empty_call_async(): +async def test_get_zone_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = GDCHardwareManagementAsyncClient( @@ -14069,21 +14113,25 @@ async def test_create_zone_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + resources.Zone( + name="name_value", + display_name="display_name_value", + state=resources.Zone.State.ADDITIONAL_INFO_NEEDED, + ciq_uri="ciq_uri_value", + globally_unique_id="globally_unique_id_value", + ) ) - response = await client.create_zone() + response = await client.get_zone() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateZoneRequest() + assert args[0] == service.GetZoneRequest() @pytest.mark.asyncio -async def test_create_zone_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -14098,7 +14146,7 @@ async def test_create_zone_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_zone + client._client._transport.get_zone in client._client._transport._wrapped_methods ) @@ -14106,21 +14154,16 @@ async def test_create_zone_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_zone + client._client._transport.get_zone ] = mock_rpc request = {} - await client.create_zone(request) + await 
client.get_zone(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_zone(request) + await client.get_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14128,8 +14171,8 @@ async def test_create_zone_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_zone_async( - transport: str = "grpc_asyncio", request_type=service.CreateZoneRequest +async def test_get_zone_async( + transport: str = "grpc_asyncio", request_type=service.GetZoneRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14141,43 +14184,54 @@ async def test_create_zone_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + resources.Zone( + name="name_value", + display_name="display_name_value", + state=resources.Zone.State.ADDITIONAL_INFO_NEEDED, + ciq_uri="ciq_uri_value", + globally_unique_id="globally_unique_id_value", + ) ) - response = await client.create_zone(request) + response = await client.get_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.CreateZoneRequest() + request = service.GetZoneRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, resources.Zone) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == resources.Zone.State.ADDITIONAL_INFO_NEEDED + assert response.ciq_uri == "ciq_uri_value" + assert response.globally_unique_id == "globally_unique_id_value" @pytest.mark.asyncio -async def test_create_zone_async_from_dict(): - await test_create_zone_async(request_type=dict) +async def test_get_zone_async_from_dict(): + await test_get_zone_async(request_type=dict) -def test_create_zone_field_headers(): +def test_get_zone_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateZoneRequest() + request = service.GetZoneRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_zone), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_zone(request) + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + call.return_value = resources.Zone() + client.get_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14188,28 +14242,26 @@ def test_create_zone_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_zone_field_headers_async(): +async def test_get_zone_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = service.CreateZoneRequest() + request = service.GetZoneRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_zone), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.create_zone(request) + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone()) + await client.get_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14220,43 +14272,35 @@ async def test_create_zone_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_zone_flattened(): +def test_get_zone_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = resources.Zone() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_zone( - parent="parent_value", - zone=resources.Zone(name="name_value"), - zone_id="zone_id_value", + client.get_zone( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].zone - mock_val = resources.Zone(name="name_value") - assert arg == mock_val - arg = args[0].zone_id - mock_val = "zone_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_zone_flattened_error(): +def test_get_zone_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14264,53 +14308,41 @@ def test_create_zone_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_zone( - service.CreateZoneRequest(), - parent="parent_value", - zone=resources.Zone(name="name_value"), - zone_id="zone_id_value", + client.get_zone( + service.GetZoneRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_zone_flattened_async(): +async def test_get_zone_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = resources.Zone() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_zone( - parent="parent_value", - zone=resources.Zone(name="name_value"), - zone_id="zone_id_value", + response = await client.get_zone( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].zone - mock_val = resources.Zone(name="name_value") - assert arg == mock_val - arg = args[0].zone_id - mock_val = "zone_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_zone_flattened_error_async(): +async def test_get_zone_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14318,22 +14350,20 @@ async def test_create_zone_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_zone( - service.CreateZoneRequest(), - parent="parent_value", - zone=resources.Zone(name="name_value"), - zone_id="zone_id_value", + await client.get_zone( + service.GetZoneRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - service.UpdateZoneRequest, + service.CreateZoneRequest, dict, ], ) -def test_update_zone(request_type, transport: str = "grpc"): +def test_create_zone(request_type, transport: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14344,22 +14374,22 @@ def test_update_zone(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_zone(request) + response = client.create_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.UpdateZoneRequest() + request = service.CreateZoneRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_update_zone_empty_call(): +def test_create_zone_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementClient( @@ -14368,17 +14398,17 @@ def test_update_zone_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_zone() + client.create_zone() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateZoneRequest() + assert args[0] == service.CreateZoneRequest() -def test_update_zone_non_empty_request_with_auto_populated_field(): +def test_create_zone_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = GDCHardwareManagementClient( @@ -14389,20 +14419,26 @@ def test_update_zone_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.UpdateZoneRequest() + request = service.CreateZoneRequest( + parent="parent_value", + zone_id="zone_id_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_zone(request=request) + client.create_zone(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateZoneRequest() + assert args[0] == service.CreateZoneRequest( + parent="parent_value", + zone_id="zone_id_value", + ) -def test_update_zone_use_cached_wrapped_rpc(): +def test_create_zone_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14416,16 +14452,16 @@ def test_update_zone_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_zone in client._transport._wrapped_methods + assert client._transport.create_zone in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_zone] = mock_rpc + client._transport._wrapped_methods[client._transport.create_zone] = mock_rpc request = {} - client.update_zone(request) + client.create_zone(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -14435,7 +14471,7 @@ def test_update_zone_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_zone(request) + client.create_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14443,7 +14479,7 @@ def test_update_zone_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_zone_empty_call_async(): +async def test_create_zone_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementAsyncClient( @@ -14452,19 +14488,19 @@ async def test_update_zone_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_zone() + response = await client.create_zone() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateZoneRequest() + assert args[0] == service.CreateZoneRequest() @pytest.mark.asyncio -async def test_update_zone_async_use_cached_wrapped_rpc( +async def test_create_zone_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14481,7 +14517,7 @@ async def test_update_zone_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_zone + client._client._transport.create_zone in client._client._transport._wrapped_methods ) @@ -14489,11 +14525,11 @@ async def test_update_zone_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_zone + client._client._transport.create_zone ] = mock_rpc request = {} - await client.update_zone(request) + await client.create_zone(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -14503,7 +14539,7 @@ async def test_update_zone_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.update_zone(request) + await client.create_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14511,8 +14547,8 @@ async def test_update_zone_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_zone_async( - transport: str = "grpc_asyncio", request_type=service.UpdateZoneRequest +async def test_create_zone_async( + transport: str = "grpc_asyncio", request_type=service.CreateZoneRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14524,17 +14560,17 @@ async def test_update_zone_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_zone(request) + response = await client.create_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.UpdateZoneRequest() + request = service.CreateZoneRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -14542,25 +14578,25 @@ async def test_update_zone_async( @pytest.mark.asyncio -async def test_update_zone_async_from_dict(): - await test_update_zone_async(request_type=dict) +async def test_create_zone_async_from_dict(): + await test_create_zone_async(request_type=dict) -def test_update_zone_field_headers(): +def test_create_zone_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.UpdateZoneRequest() + request = service.CreateZoneRequest() - request.zone.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_zone(request) + client.create_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14571,28 +14607,28 @@ def test_update_zone_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "zone.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_zone_field_headers_async(): +async def test_create_zone_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.UpdateZoneRequest() + request = service.CreateZoneRequest() - request.zone.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_zone(request) + await client.create_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14603,39 +14639,43 @@ async def test_update_zone_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "zone.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_zone_flattened(): +def test_create_zone_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_zone( + client.create_zone( + parent="parent_value", zone=resources.Zone(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + zone_id="zone_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].zone mock_val = resources.Zone(name="name_value") assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].zone_id + mock_val = "zone_id_value" assert arg == mock_val -def test_update_zone_flattened_error(): +def test_create_zone_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14643,21 +14683,22 @@ def test_update_zone_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_zone( - service.UpdateZoneRequest(), + client.create_zone( + service.CreateZoneRequest(), + parent="parent_value", zone=resources.Zone(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + zone_id="zone_id_value", ) @pytest.mark.asyncio -async def test_update_zone_flattened_async(): +async def test_create_zone_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -14666,25 +14707,29 @@ async def test_update_zone_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.update_zone( + response = await client.create_zone( + parent="parent_value", zone=resources.Zone(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + zone_id="zone_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].zone mock_val = resources.Zone(name="name_value") assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].zone_id + mock_val = "zone_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_zone_flattened_error_async(): +async def test_create_zone_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14692,21 +14737,22 @@ async def test_update_zone_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_zone( - service.UpdateZoneRequest(), + await client.create_zone( + service.CreateZoneRequest(), + parent="parent_value", zone=resources.Zone(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + zone_id="zone_id_value", ) @pytest.mark.parametrize( "request_type", [ - service.DeleteZoneRequest, + service.UpdateZoneRequest, dict, ], ) -def test_delete_zone(request_type, transport: str = "grpc"): +def test_update_zone(request_type, transport: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14717,22 +14763,22 @@ def test_delete_zone(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_zone(request) + response = client.update_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.DeleteZoneRequest() + request = service.UpdateZoneRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_delete_zone_empty_call(): +def test_update_zone_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementClient( @@ -14741,17 +14787,17 @@ def test_delete_zone_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_zone() + client.update_zone() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteZoneRequest() + assert args[0] == service.UpdateZoneRequest() -def test_delete_zone_non_empty_request_with_auto_populated_field(): +def test_update_zone_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = GDCHardwareManagementClient( @@ -14762,24 +14808,20 @@ def test_delete_zone_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.DeleteZoneRequest( - name="name_value", - ) + request = service.UpdateZoneRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_zone(request=request) + client.update_zone(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteZoneRequest( - name="name_value", - ) + assert args[0] == service.UpdateZoneRequest() -def test_delete_zone_use_cached_wrapped_rpc(): +def test_update_zone_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14793,16 +14835,16 @@ def test_delete_zone_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_zone in client._transport._wrapped_methods + assert client._transport.update_zone in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.delete_zone] = mock_rpc + client._transport._wrapped_methods[client._transport.update_zone] = mock_rpc request = {} - client.delete_zone(request) + client.update_zone(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -14812,7 +14854,7 @@ def test_delete_zone_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_zone(request) + client.update_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14820,7 +14862,7 @@ def test_delete_zone_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_zone_empty_call_async(): +async def test_update_zone_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementAsyncClient( @@ -14829,19 +14871,19 @@ async def test_delete_zone_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_zone() + response = await client.update_zone() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteZoneRequest() + assert args[0] == service.UpdateZoneRequest() @pytest.mark.asyncio -async def test_delete_zone_async_use_cached_wrapped_rpc( +async def test_update_zone_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14858,7 +14900,7 @@ async def test_delete_zone_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_zone + client._client._transport.update_zone in client._client._transport._wrapped_methods ) @@ -14866,11 +14908,11 @@ async def test_delete_zone_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_zone + client._client._transport.update_zone ] = mock_rpc request = {} - await client.delete_zone(request) + await client.update_zone(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -14880,7 +14922,7 @@ async def test_delete_zone_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.delete_zone(request) + await client.update_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14888,8 +14930,8 @@ async def test_delete_zone_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_zone_async( - transport: str = "grpc_asyncio", request_type=service.DeleteZoneRequest +async def test_update_zone_async( + transport: str = "grpc_asyncio", request_type=service.UpdateZoneRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14901,17 +14943,17 @@ async def test_delete_zone_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_zone(request) + response = await client.update_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.DeleteZoneRequest() + request = service.UpdateZoneRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -14919,25 +14961,25 @@ async def test_delete_zone_async( @pytest.mark.asyncio -async def test_delete_zone_async_from_dict(): - await test_delete_zone_async(request_type=dict) +async def test_update_zone_async_from_dict(): + await test_update_zone_async(request_type=dict) -def test_delete_zone_field_headers(): +def test_update_zone_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DeleteZoneRequest() + request = service.UpdateZoneRequest() - request.name = "name_value" + request.zone.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_zone(request) + client.update_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14948,28 +14990,28 @@ def test_delete_zone_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "zone.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_zone_field_headers_async(): +async def test_update_zone_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DeleteZoneRequest() + request = service.UpdateZoneRequest() - request.name = "name_value" + request.zone.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_zone(request) + await client.update_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14980,35 +15022,39 @@ async def test_delete_zone_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "zone.name=name_value", ) in kw["metadata"] -def test_delete_zone_flattened(): +def test_update_zone_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_zone( - name="name_value", + client.update_zone( + zone=resources.Zone(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].zone + mock_val = resources.Zone(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_delete_zone_flattened_error(): +def test_update_zone_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15016,20 +15062,21 @@ def test_delete_zone_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_zone( - service.DeleteZoneRequest(), - name="name_value", + client.update_zone( + service.UpdateZoneRequest(), + zone=resources.Zone(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_delete_zone_flattened_async(): +async def test_update_zone_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -15038,21 +15085,25 @@ async def test_delete_zone_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_zone( - name="name_value", + response = await client.update_zone( + zone=resources.Zone(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].zone + mock_val = resources.Zone(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_delete_zone_flattened_error_async(): +async def test_update_zone_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15060,20 +15111,21 @@ async def test_delete_zone_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_zone( - service.DeleteZoneRequest(), - name="name_value", + await client.update_zone( + service.UpdateZoneRequest(), + zone=resources.Zone(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - service.SignalZoneStateRequest, + service.DeleteZoneRequest, dict, ], ) -def test_signal_zone_state(request_type, transport: str = "grpc"): +def test_delete_zone(request_type, transport: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15084,24 +15136,22 @@ def test_signal_zone_state(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.signal_zone_state), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.signal_zone_state(request) + response = client.delete_zone(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.SignalZoneStateRequest() + request = service.DeleteZoneRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_signal_zone_state_empty_call(): +def test_delete_zone_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementClient( @@ -15110,19 +15160,17 @@ def test_signal_zone_state_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.signal_zone_state), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.signal_zone_state() + client.delete_zone() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.SignalZoneStateRequest() + assert args[0] == service.DeleteZoneRequest() -def test_signal_zone_state_non_empty_request_with_auto_populated_field(): +def test_delete_zone_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = GDCHardwareManagementClient( @@ -15133,26 +15181,24 @@ def test_signal_zone_state_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.SignalZoneStateRequest( + request = service.DeleteZoneRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.signal_zone_state), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.signal_zone_state(request=request) + client.delete_zone(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.SignalZoneStateRequest( + assert args[0] == service.DeleteZoneRequest( name="name_value", ) -def test_signal_zone_state_use_cached_wrapped_rpc(): +def test_delete_zone_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15166,18 +15212,16 @@ def test_signal_zone_state_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.signal_zone_state in client._transport._wrapped_methods + assert client._transport.delete_zone in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.signal_zone_state - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_zone] = mock_rpc request = {} - client.signal_zone_state(request) + client.delete_zone(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -15187,7 +15231,7 @@ def test_signal_zone_state_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.signal_zone_state(request) + client.delete_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15195,7 +15239,7 @@ def test_signal_zone_state_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_signal_zone_state_empty_call_async(): +async def test_delete_zone_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementAsyncClient( @@ -15204,21 +15248,19 @@ async def test_signal_zone_state_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.signal_zone_state), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.signal_zone_state() + response = await client.delete_zone() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.SignalZoneStateRequest() + assert args[0] == service.DeleteZoneRequest() @pytest.mark.asyncio -async def test_signal_zone_state_async_use_cached_wrapped_rpc( +async def test_delete_zone_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -15235,7 +15277,7 @@ async def test_signal_zone_state_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.signal_zone_state + client._client._transport.delete_zone in client._client._transport._wrapped_methods ) @@ -15243,11 +15285,11 @@ async def test_signal_zone_state_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.signal_zone_state + client._client._transport.delete_zone ] = mock_rpc request = {} - await client.signal_zone_state(request) + await client.delete_zone(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -15257,7 +15299,7 @@ async def test_signal_zone_state_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.signal_zone_state(request) + await client.delete_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15265,8 +15307,8 @@ async def test_signal_zone_state_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_signal_zone_state_async( - transport: str = "grpc_asyncio", request_type=service.SignalZoneStateRequest +async def test_delete_zone_async( + transport: str = "grpc_asyncio", request_type=service.DeleteZoneRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15278,19 +15320,17 @@ async def test_signal_zone_state_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.signal_zone_state), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.signal_zone_state(request) + response = await client.delete_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.SignalZoneStateRequest() + request = service.DeleteZoneRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -15298,27 +15338,25 @@ async def test_signal_zone_state_async( @pytest.mark.asyncio -async def test_signal_zone_state_async_from_dict(): - await test_signal_zone_state_async(request_type=dict) +async def test_delete_zone_async_from_dict(): + await test_delete_zone_async(request_type=dict) -def test_signal_zone_state_field_headers(): +def test_delete_zone_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.SignalZoneStateRequest() + request = service.DeleteZoneRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.signal_zone_state), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.signal_zone_state(request) + client.delete_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -15334,25 +15372,23 @@ def test_signal_zone_state_field_headers(): @pytest.mark.asyncio -async def test_signal_zone_state_field_headers_async(): +async def test_delete_zone_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.SignalZoneStateRequest() + request = service.DeleteZoneRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.signal_zone_state), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.signal_zone_state(request) + await client.delete_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -15367,22 +15403,19 @@ async def test_signal_zone_state_field_headers_async(): ) in kw["metadata"] -def test_signal_zone_state_flattened(): +def test_delete_zone_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.signal_zone_state), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.signal_zone_state( + client.delete_zone( name="name_value", - state_signal=service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP, ) # Establish that the underlying call was made with the expected @@ -15392,12 +15425,9 @@ def test_signal_zone_state_flattened(): arg = args[0].name mock_val = "name_value" assert arg == mock_val - arg = args[0].state_signal - mock_val = service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP - assert arg == mock_val -def test_signal_zone_state_flattened_error(): +def test_delete_zone_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15405,23 +15435,20 @@ def test_signal_zone_state_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.signal_zone_state( - service.SignalZoneStateRequest(), + client.delete_zone( + service.DeleteZoneRequest(), name="name_value", - state_signal=service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP, ) @pytest.mark.asyncio -async def test_signal_zone_state_flattened_async(): +async def test_delete_zone_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.signal_zone_state), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -15430,9 +15457,8 @@ async def test_signal_zone_state_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.signal_zone_state( + response = await client.delete_zone( name="name_value", - state_signal=service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP, ) # Establish that the underlying call was made with the expected @@ -15442,13 +15468,10 @@ async def test_signal_zone_state_flattened_async(): arg = args[0].name mock_val = "name_value" assert arg == mock_val - arg = args[0].state_signal - mock_val = service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP - assert arg == mock_val @pytest.mark.asyncio -async def test_signal_zone_state_flattened_error_async(): +async def test_delete_zone_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15456,56 +15479,845 @@ async def test_signal_zone_state_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.signal_zone_state( - service.SignalZoneStateRequest(), + await client.delete_zone( + service.DeleteZoneRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.SignalZoneStateRequest, + dict, + ], +) +def test_signal_zone_state(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.signal_zone_state(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.SignalZoneStateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_signal_zone_state_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.signal_zone_state() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.SignalZoneStateRequest() + + +def test_signal_zone_state_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.SignalZoneStateRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.signal_zone_state(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.SignalZoneStateRequest( name="name_value", - state_signal=service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP, ) +def test_signal_zone_state_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.signal_zone_state in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.signal_zone_state + ] = mock_rpc + request = {} + client.signal_zone_state(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.signal_zone_state(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_signal_zone_state_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.signal_zone_state() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.SignalZoneStateRequest() + + +@pytest.mark.asyncio +async def test_signal_zone_state_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.signal_zone_state + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.signal_zone_state + ] = mock_rpc + + request = {} + await client.signal_zone_state(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.signal_zone_state(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_signal_zone_state_async( + transport: str = "grpc_asyncio", request_type=service.SignalZoneStateRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.signal_zone_state(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.SignalZoneStateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_signal_zone_state_async_from_dict(): + await test_signal_zone_state_async(request_type=dict) + + +def test_signal_zone_state_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.SignalZoneStateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.signal_zone_state(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_signal_zone_state_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.SignalZoneStateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.signal_zone_state(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_signal_zone_state_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.signal_zone_state( + name="name_value", + state_signal=service.SignalZoneStateRequest.StateSignal.FACTORY_TURNUP_CHECKS_PASSED, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].state_signal + mock_val = ( + service.SignalZoneStateRequest.StateSignal.FACTORY_TURNUP_CHECKS_PASSED + ) + assert arg == mock_val + + +def test_signal_zone_state_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.signal_zone_state( + service.SignalZoneStateRequest(), + name="name_value", + state_signal=service.SignalZoneStateRequest.StateSignal.FACTORY_TURNUP_CHECKS_PASSED, + ) + + +@pytest.mark.asyncio +async def test_signal_zone_state_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.signal_zone_state( + name="name_value", + state_signal=service.SignalZoneStateRequest.StateSignal.FACTORY_TURNUP_CHECKS_PASSED, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].state_signal + mock_val = ( + service.SignalZoneStateRequest.StateSignal.FACTORY_TURNUP_CHECKS_PASSED + ) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_signal_zone_state_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.signal_zone_state( + service.SignalZoneStateRequest(), + name="name_value", + state_signal=service.SignalZoneStateRequest.StateSignal.FACTORY_TURNUP_CHECKS_PASSED, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListOrdersRequest, + dict, + ], +) +def test_list_orders_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.ListOrdersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListOrdersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_orders(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOrdersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_orders_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_orders in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_orders] = mock_rpc + + request = {} + client.list_orders(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_orders(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_orders_rest_required_fields(request_type=service.ListOrdersRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_orders._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_orders._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListOrdersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListOrdersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_orders(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_orders_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_orders._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_orders_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_list_orders" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_list_orders" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListOrdersRequest.pb(service.ListOrdersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListOrdersResponse.to_json( + service.ListOrdersResponse() + ) + + request = service.ListOrdersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListOrdersResponse() + + client.list_orders( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_orders_rest_bad_request( + transport: str = "rest", request_type=service.ListOrdersRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_orders(request) + + +def test_list_orders_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListOrdersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListOrdersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_orders(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/orders" + % client.transport._host, + args[1], + ) + + +def test_list_orders_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_orders( + service.ListOrdersRequest(), + parent="parent_value", + ) + + +def test_list_orders_rest_pager(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListOrdersResponse( + orders=[ + resources.Order(), + resources.Order(), + resources.Order(), + ], + next_page_token="abc", + ), + service.ListOrdersResponse( + orders=[], + next_page_token="def", + ), + service.ListOrdersResponse( + orders=[ + resources.Order(), + ], + next_page_token="ghi", + ), + service.ListOrdersResponse( + orders=[ + resources.Order(), + resources.Order(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListOrdersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_orders(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Order) for i in results) + + pages = list(client.list_orders(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + @pytest.mark.parametrize( "request_type", [ - service.ListOrdersRequest, + service.GetOrderRequest, dict, ], ) -def 
test_list_orders_rest(request_type): +def test_get_order_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListOrdersResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = resources.Order( + name="name_value", + display_name="display_name_value", + state=resources.Order.State.DRAFT, + target_workloads=["target_workloads_value"], + customer_motivation="customer_motivation_value", + region_code="region_code_value", + order_form_uri="order_form_uri_value", + type_=resources.Order.Type.PAID, + billing_id="billing_id_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListOrdersResponse.pb(return_value) + return_value = resources.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_orders(request) + response = client.get_order(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListOrdersPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.Order) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == resources.Order.State.DRAFT + assert response.target_workloads == ["target_workloads_value"] + assert response.customer_motivation == "customer_motivation_value" + assert response.region_code == "region_code_value" + assert response.order_form_uri == "order_form_uri_value" + assert response.type_ == resources.Order.Type.PAID + assert response.billing_id == "billing_id_value" -def test_list_orders_rest_use_cached_wrapped_rpc(): +def test_get_order_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15519,33 +16331,33 @@ def test_list_orders_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_orders in client._transport._wrapped_methods + assert client._transport.get_order in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_orders] = mock_rpc + client._transport._wrapped_methods[client._transport.get_order] = mock_rpc request = {} - client.list_orders(request) + client.get_order(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_orders(request) + client.get_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_orders_rest_required_fields(request_type=service.ListOrdersRequest): +def test_get_order_rest_required_fields(request_type=service.GetOrderRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15556,30 +16368,21 @@ def test_list_orders_rest_required_fields(request_type=service.ListOrdersRequest unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_orders._get_unset_required_fields(jsonified_request) + ).get_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_orders._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).get_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15588,7 +16391,7 @@ def test_list_orders_rest_required_fields(request_type=service.ListOrdersRequest request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListOrdersResponse() + return_value = resources.Order() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15609,40 +16412,30 @@ def test_list_orders_rest_required_fields(request_type=service.ListOrdersRequest response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListOrdersResponse.pb(return_value) + return_value = resources.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_orders(request) + response = client.get_order(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_orders_rest_unset_required_fields(): +def test_get_order_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_orders._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( 
- "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_order._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_orders_rest_interceptors(null_interceptor): +def test_get_order_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -15655,13 +16448,13 @@ def test_list_orders_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_list_orders" + transports.GDCHardwareManagementRestInterceptor, "post_get_order" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_list_orders" + transports.GDCHardwareManagementRestInterceptor, "pre_get_order" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.ListOrdersRequest.pb(service.ListOrdersRequest()) + pb_message = service.GetOrderRequest.pb(service.GetOrderRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15672,19 +16465,17 @@ def test_list_orders_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = service.ListOrdersResponse.to_json( - service.ListOrdersResponse() - ) + req.return_value._content = resources.Order.to_json(resources.Order()) - request = service.ListOrdersRequest() + request = service.GetOrderRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = service.ListOrdersResponse() + post.return_value = resources.Order() - client.list_orders( + client.get_order( request, metadata=[ ("key", "val"), @@ -15696,8 +16487,8 @@ def 
test_list_orders_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_orders_rest_bad_request( - transport: str = "rest", request_type=service.ListOrdersRequest +def test_get_order_rest_bad_request( + transport: str = "rest", request_type=service.GetOrderRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15705,7 +16496,7 @@ def test_list_orders_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -15717,10 +16508,10 @@ def test_list_orders_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_orders(request) + client.get_order(request) -def test_list_orders_rest_flattened(): +def test_get_order_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15729,14 +16520,14 @@ def test_list_orders_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = service.ListOrdersResponse() + return_value = resources.Order() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2/orders/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -15744,25 +16535,25 @@ def test_list_orders_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListOrdersResponse.pb(return_value) + return_value = resources.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_orders(**mock_args) + client.get_order(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/orders" + "%s/v1alpha/{name=projects/*/locations/*/orders/*}" % client.transport._host, args[1], ) -def test_list_orders_rest_flattened_error(transport: str = "rest"): +def test_get_order_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15771,130 +16562,182 @@ def test_list_orders_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_orders( - service.ListOrdersRequest(), - parent="parent_value", + client.get_order( + service.GetOrderRequest(), + name="name_value", ) -def test_list_orders_rest_pager(transport: str = "rest"): +def test_get_order_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateOrderRequest, + dict, + ], +) +def test_create_order_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListOrdersResponse( - orders=[ - resources.Order(), - resources.Order(), - resources.Order(), - ], - next_page_token="abc", - ), - service.ListOrdersResponse( - orders=[], - next_page_token="def", - ), - service.ListOrdersResponse( - orders=[ - resources.Order(), - ], - next_page_token="ghi", - ), - service.ListOrdersResponse( - orders=[ - resources.Order(), - resources.Order(), - ], - ), - ) - # Two responses for two calls - response = response + response + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["order"] = { + "name": "name_value", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "organization_contact": { + "address": { + "revision": 879, + "region_code": "region_code_value", + "language_code": "language_code_value", + "postal_code": "postal_code_value", + "sorting_code": "sorting_code_value", + 
"administrative_area": "administrative_area_value", + "locality": "locality_value", + "sublocality": "sublocality_value", + "address_lines": ["address_lines_value1", "address_lines_value2"], + "recipients": ["recipients_value1", "recipients_value2"], + "organization": "organization_value", + }, + "email": "email_value", + "phone": "phone_value", + "contacts": [ + { + "given_name": "given_name_value", + "family_name": "family_name_value", + "email": "email_value", + "phone": "phone_value", + "time_zone": {"id": "id_value", "version": "version_value"}, + "reachable_times": [ + { + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "end_time": {}, + "days": [1], + } + ], + } + ], + }, + "target_workloads": ["target_workloads_value1", "target_workloads_value2"], + "customer_motivation": "customer_motivation_value", + "fulfillment_time": {}, + "region_code": "region_code_value", + "order_form_uri": "order_form_uri_value", + "type_": 1, + "submit_time": {}, + "billing_id": "billing_id_value", + "existing_hardware": [ + { + "site": "site_value", + "rack_location": "rack_location_value", + "rack_space": [{"start_rack_unit": 1613, "end_rack_unit": 1366}], + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Wrap the values into proper Response objs - response = tuple(service.ListOrdersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateOrderRequest.meta.fields["order"] - sample_request = {"parent": "projects/sample1/locations/sample2"} + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - pager = client.list_orders(request=sample_request) + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Order) for i in results) + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields - pages = list(client.list_orders(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + subfields_not_in_runtime = [] -@pytest.mark.parametrize( - "request_type", - [ - service.GetOrderRequest, - dict, - ], -) -def test_get_order_rest(request_type): - client = 
GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["order"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["order"][field])): + del request_init["order"][field][i][subfield] + else: + del request_init["order"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Order( - name="name_value", - display_name="display_name_value", - state=resources.Order.State.DRAFT, - target_workloads=["target_workloads_value"], - customer_motivation="customer_motivation_value", - region_code="region_code_value", - order_form_uri="order_form_uri_value", - type_=resources.Order.Type.PAID, - billing_id="billing_id_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_order(request) + response = client.create_order(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Order) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.state == resources.Order.State.DRAFT - assert response.target_workloads == ["target_workloads_value"] - assert response.customer_motivation == "customer_motivation_value" - assert response.region_code == "region_code_value" - assert response.order_form_uri == "order_form_uri_value" - assert response.type_ == resources.Order.Type.PAID - assert response.billing_id == "billing_id_value" + assert response.operation.name == "operations/spam" -def test_get_order_rest_use_cached_wrapped_rpc(): +def test_create_order_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15908,33 +16751,37 @@ def test_get_order_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert 
client._transport.get_order in client._transport._wrapped_methods + assert client._transport.create_order in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_order] = mock_rpc + client._transport._wrapped_methods[client._transport.create_order] = mock_rpc request = {} - client.get_order(request) + client.create_order(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_order(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_order_rest_required_fields(request_type=service.GetOrderRequest): +def test_create_order_rest_required_fields(request_type=service.CreateOrderRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15945,21 +16792,28 @@ def test_get_order_rest_required_fields(request_type=service.GetOrderRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_order._get_unset_required_fields(jsonified_request) + ).create_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).get_order._get_unset_required_fields(jsonified_request) + ).create_order._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "order_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15968,7 +16822,7 @@ def test_get_order_rest_required_fields(request_type=service.GetOrderRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Order() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15980,39 +16834,50 @@ def test_get_order_rest_required_fields(request_type=service.GetOrderRequest): pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_order(request) + response = client.create_order(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_order_rest_unset_required_fields(): +def test_create_order_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_order._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_order._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "orderId", + "requestId", + ) + ) + & set( + ( + "parent", + "order", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_order_rest_interceptors(null_interceptor): +def test_create_order_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16025,13 +16890,15 @@ def test_get_order_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( 
- transports.GDCHardwareManagementRestInterceptor, "post_get_order" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_create_order" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_get_order" + transports.GDCHardwareManagementRestInterceptor, "pre_create_order" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.GetOrderRequest.pb(service.GetOrderRequest()) + pb_message = service.CreateOrderRequest.pb(service.CreateOrderRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16042,17 +16909,19 @@ def test_get_order_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Order.to_json(resources.Order()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = service.GetOrderRequest() + request = service.CreateOrderRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Order() + post.return_value = operations_pb2.Operation() - client.get_order( + client.create_order( request, metadata=[ ("key", "val"), @@ -16064,8 +16933,8 @@ def test_get_order_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_order_rest_bad_request( - transport: str = "rest", request_type=service.GetOrderRequest +def test_create_order_rest_bad_request( + transport: str = "rest", request_type=service.CreateOrderRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16073,7 +16942,7 @@ def test_get_order_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} + request_init = {"parent": 
"projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16085,10 +16954,10 @@ def test_get_order_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_order(request) + client.create_order(request) -def test_get_order_rest_flattened(): +def test_create_order_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16097,40 +16966,40 @@ def test_get_order_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Order() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/orders/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + order=resources.Order(name="name_value"), + order_id="order_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_order(**mock_args) + client.create_order(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/orders/*}" + "%s/v1alpha/{parent=projects/*/locations/*}/orders" % client.transport._host, args[1], ) -def test_get_order_rest_flattened_error(transport: str = "rest"): +def test_create_order_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16139,13 +17008,15 @@ def test_get_order_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_order( - service.GetOrderRequest(), - name="name_value", + client.create_order( + service.CreateOrderRequest(), + parent="parent_value", + order=resources.Order(name="name_value"), + order_id="order_id_value", ) -def test_get_order_rest_error(): +def test_create_order_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -16154,20 +17025,22 @@ def test_get_order_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.CreateOrderRequest, + service.UpdateOrderRequest, dict, ], ) -def test_create_order_rest(request_type): +def test_update_order_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "order": {"name": "projects/sample1/locations/sample2/orders/sample3"} + } request_init["order"] = { - "name": "name_value", + "name": "projects/sample1/locations/sample2/orders/sample3", "display_name": "display_name_value", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, @@ -16232,7 +17105,7 @@ def test_create_order_rest(request_type): # See 
https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = service.CreateOrderRequest.meta.fields["order"] + test_field = service.UpdateOrderRequest.meta.fields["order"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -16308,13 +17181,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_order(request) + response = client.update_order(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_order_rest_use_cached_wrapped_rpc(): +def test_update_order_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16328,17 +17201,17 @@ def test_create_order_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_order in client._transport._wrapped_methods + assert client._transport.update_order in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_order] = mock_rpc + client._transport._wrapped_methods[client._transport.update_order] = mock_rpc request = {} - client.create_order(request) + client.update_order(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -16347,18 +17220,17 @@ def test_create_order_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_order(request) + client.update_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_order_rest_required_fields(request_type=service.CreateOrderRequest): +def test_update_order_rest_required_fields(request_type=service.UpdateOrderRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16369,28 +17241,24 @@ def test_create_order_rest_required_fields(request_type=service.CreateOrderReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_order._get_unset_required_fields(jsonified_request) + ).update_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_order._get_unset_required_fields(jsonified_request) + ).update_order._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "order_id", "request_id", + "update_mask", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16411,7 +17279,7 @@ def test_create_order_rest_required_fields(request_type=service.CreateOrderReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -16424,29 +17292,29 @@ def test_create_order_rest_required_fields(request_type=service.CreateOrderReque response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_order(request) + response = client.update_order(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_order_rest_unset_required_fields(): +def test_update_order_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_order._get_unset_required_fields({}) + unset_fields = transport.update_order._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "orderId", "requestId", + "updateMask", ) ) & set( ( - "parent", + "updateMask", "order", ) ) @@ -16454,7 +17322,7 @@ def test_create_order_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_order_rest_interceptors(null_interceptor): +def test_update_order_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16469,13 +17337,13 @@ 
def test_create_order_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_create_order" + transports.GDCHardwareManagementRestInterceptor, "post_update_order" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_create_order" + transports.GDCHardwareManagementRestInterceptor, "pre_update_order" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.CreateOrderRequest.pb(service.CreateOrderRequest()) + pb_message = service.UpdateOrderRequest.pb(service.UpdateOrderRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16490,7 +17358,7 @@ def test_create_order_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.CreateOrderRequest() + request = service.UpdateOrderRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -16498,7 +17366,7 @@ def test_create_order_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_order( + client.update_order( request, metadata=[ ("key", "val"), @@ -16510,8 +17378,8 @@ def test_create_order_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_order_rest_bad_request( - transport: str = "rest", request_type=service.CreateOrderRequest +def test_update_order_rest_bad_request( + transport: str = "rest", request_type=service.UpdateOrderRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16519,7 +17387,9 @@ def test_create_order_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "order": {"name": "projects/sample1/locations/sample2/orders/sample3"} + } request = request_type(**request_init) # Mock the http 
request call within the method and fake a BadRequest error. @@ -16531,10 +17401,10 @@ def test_create_order_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_order(request) + client.update_order(request) -def test_create_order_rest_flattened(): +def test_update_order_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16546,13 +17416,14 @@ def test_create_order_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "order": {"name": "projects/sample1/locations/sample2/orders/sample3"} + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", order=resources.Order(name="name_value"), - order_id="order_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -16563,20 +17434,20 @@ def test_create_order_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_order(**mock_args) + client.update_order(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/orders" + "%s/v1alpha/{order.name=projects/*/locations/*/orders/*}" % client.transport._host, args[1], ) -def test_create_order_rest_flattened_error(transport: str = "rest"): +def test_update_order_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16585,15 +17456,14 @@ def test_create_order_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_order( - service.CreateOrderRequest(), - parent="parent_value", + client.update_order( + service.UpdateOrderRequest(), order=resources.Order(name="name_value"), - order_id="order_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_order_rest_error(): +def test_update_order_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -16602,148 +17472,18 @@ def test_create_order_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.UpdateOrderRequest, + service.DeleteOrderRequest, dict, ], ) -def test_update_order_rest(request_type): +def test_delete_order_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "order": {"name": "projects/sample1/locations/sample2/orders/sample3"} - } - request_init["order"] = { - "name": "projects/sample1/locations/sample2/orders/sample3", - "display_name": "display_name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "state": 1, - "organization_contact": { - "address": { - "revision": 879, - "region_code": 
"region_code_value", - "language_code": "language_code_value", - "postal_code": "postal_code_value", - "sorting_code": "sorting_code_value", - "administrative_area": "administrative_area_value", - "locality": "locality_value", - "sublocality": "sublocality_value", - "address_lines": ["address_lines_value1", "address_lines_value2"], - "recipients": ["recipients_value1", "recipients_value2"], - "organization": "organization_value", - }, - "email": "email_value", - "phone": "phone_value", - "contacts": [ - { - "given_name": "given_name_value", - "family_name": "family_name_value", - "email": "email_value", - "phone": "phone_value", - "time_zone": {"id": "id_value", "version": "version_value"}, - "reachable_times": [ - { - "start_time": { - "hours": 561, - "minutes": 773, - "seconds": 751, - "nanos": 543, - }, - "end_time": {}, - "days": [1], - } - ], - } - ], - }, - "target_workloads": ["target_workloads_value1", "target_workloads_value2"], - "customer_motivation": "customer_motivation_value", - "fulfillment_time": {}, - "region_code": "region_code_value", - "order_form_uri": "order_form_uri_value", - "type_": 1, - "submit_time": {}, - "billing_id": "billing_id_value", - "existing_hardware": [ - { - "site": "site_value", - "rack_location": "rack_location_value", - "rack_space": [{"start_rack_unit": 1613, "end_rack_unit": 1366}], - } - ], - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateOrderRequest.meta.fields["order"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["order"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["order"][field])): - del request_init["order"][field][i][subfield] - else: - del 
request_init["order"][field][subfield] + request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -16758,13 +17498,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_order(request) + response = client.delete_order(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_update_order_rest_use_cached_wrapped_rpc(): +def test_delete_order_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16778,17 +17518,17 @@ def test_update_order_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_order in client._transport._wrapped_methods + assert client._transport.delete_order in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_order] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_order] = mock_rpc request = {} - client.update_order(request) + client.delete_order(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -16797,17 +17537,18 @@ def test_update_order_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_order(request) + client.delete_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_order_rest_required_fields(request_type=service.UpdateOrderRequest): +def test_delete_order_rest_required_fields(request_type=service.DeleteOrderRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16818,24 +17559,28 @@ def test_update_order_rest_required_fields(request_type=service.UpdateOrderReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_order._get_unset_required_fields(jsonified_request) + ).delete_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_order._get_unset_required_fields(jsonified_request) + ).delete_order._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( + "force", "request_id", - "update_mask", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16856,10 +17601,9 @@ def test_update_order_rest_required_fields(request_type=service.UpdateOrderReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -16869,37 +17613,32 @@ def test_update_order_rest_required_fields(request_type=service.UpdateOrderReque response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_order(request) + response = client.delete_order(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_order_rest_unset_required_fields(): +def test_delete_order_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_order._get_unset_required_fields({}) + unset_fields = transport.delete_order._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "requestId", - "updateMask", - ) - ) - & set( - ( - "updateMask", - "order", + "force", + "requestId", ) ) + & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_order_rest_interceptors(null_interceptor): +def test_delete_order_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ 
-16914,13 +17653,13 @@ def test_update_order_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_update_order" + transports.GDCHardwareManagementRestInterceptor, "post_delete_order" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_update_order" + transports.GDCHardwareManagementRestInterceptor, "pre_delete_order" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.UpdateOrderRequest.pb(service.UpdateOrderRequest()) + pb_message = service.DeleteOrderRequest.pb(service.DeleteOrderRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16935,7 +17674,7 @@ def test_update_order_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.UpdateOrderRequest() + request = service.DeleteOrderRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -16943,7 +17682,7 @@ def test_update_order_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_order( + client.delete_order( request, metadata=[ ("key", "val"), @@ -16955,8 +17694,8 @@ def test_update_order_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_order_rest_bad_request( - transport: str = "rest", request_type=service.UpdateOrderRequest +def test_delete_order_rest_bad_request( + transport: str = "rest", request_type=service.DeleteOrderRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16964,9 +17703,7 @@ def test_update_order_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "order": {"name": "projects/sample1/locations/sample2/orders/sample3"} - } + request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} request = 
request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16978,10 +17715,10 @@ def test_update_order_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_order(request) + client.delete_order(request) -def test_update_order_rest_flattened(): +def test_delete_order_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16993,14 +17730,11 @@ def test_update_order_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "order": {"name": "projects/sample1/locations/sample2/orders/sample3"} - } + sample_request = {"name": "projects/sample1/locations/sample2/orders/sample3"} # get truthy value for each flattened field mock_args = dict( - order=resources.Order(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -17011,20 +17745,20 @@ def test_update_order_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_order(**mock_args) + client.delete_order(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{order.name=projects/*/locations/*/orders/*}" + "%s/v1alpha/{name=projects/*/locations/*/orders/*}" % client.transport._host, args[1], ) -def test_update_order_rest_flattened_error(transport: str = "rest"): +def test_delete_order_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17033,14 +17767,13 @@ def test_update_order_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_order( - service.UpdateOrderRequest(), - order=resources.Order(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_order( + service.DeleteOrderRequest(), + name="name_value", ) -def test_update_order_rest_error(): +def test_delete_order_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -17049,11 +17782,11 @@ def test_update_order_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.DeleteOrderRequest, + service.SubmitOrderRequest, dict, ], ) -def test_delete_order_rest(request_type): +def test_submit_order_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17075,13 +17808,13 @@ def test_delete_order_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_order(request) + response = client.submit_order(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_delete_order_rest_use_cached_wrapped_rpc(): +def test_submit_order_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17095,17 +17828,17 @@ def test_delete_order_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_order in client._transport._wrapped_methods + assert client._transport.submit_order in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_order] = mock_rpc + client._transport._wrapped_methods[client._transport.submit_order] = mock_rpc request = {} - client.delete_order(request) + client.submit_order(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -17114,14 +17847,14 @@ def test_delete_order_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_order(request) + client.submit_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_order_rest_required_fields(request_type=service.DeleteOrderRequest): +def test_submit_order_rest_required_fields(request_type=service.SubmitOrderRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} @@ -17136,7 +17869,7 @@ def test_delete_order_rest_required_fields(request_type=service.DeleteOrderReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_order._get_unset_required_fields(jsonified_request) + ).submit_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -17145,14 +17878,7 @@ def test_delete_order_rest_required_fields(request_type=service.DeleteOrderReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_order._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "force", - "request_id", - ) - ) + ).submit_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -17178,9 +17904,10 @@ def test_delete_order_rest_required_fields(request_type=service.DeleteOrderReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -17190,32 +17917,24 @@ def test_delete_order_rest_required_fields(request_type=service.DeleteOrderReque response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_order(request) + response = client.submit_order(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_order_rest_unset_required_fields(): +def test_submit_order_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_order._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "force", - "requestId", - ) - ) - & set(("name",)) - ) + unset_fields = transport.submit_order._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_order_rest_interceptors(null_interceptor): +def test_submit_order_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17230,13 +17949,13 @@ def test_delete_order_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_delete_order" + transports.GDCHardwareManagementRestInterceptor, "post_submit_order" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_delete_order" + transports.GDCHardwareManagementRestInterceptor, "pre_submit_order" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.DeleteOrderRequest.pb(service.DeleteOrderRequest()) + pb_message = service.SubmitOrderRequest.pb(service.SubmitOrderRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -17251,7 +17970,7 @@ def test_delete_order_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.DeleteOrderRequest() + request = service.SubmitOrderRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -17259,7 +17978,7 @@ def test_delete_order_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_order( + client.submit_order( request, metadata=[ ("key", "val"), @@ -17271,8 +17990,8 @@ def test_delete_order_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_order_rest_bad_request( - transport: str = "rest", request_type=service.DeleteOrderRequest +def test_submit_order_rest_bad_request( + transport: str = "rest", request_type=service.SubmitOrderRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17292,10 +18011,10 @@ def test_delete_order_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_order(request) + client.submit_order(request) -def test_delete_order_rest_flattened(): +def test_submit_order_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ 
-17322,20 +18041,20 @@ def test_delete_order_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_order(**mock_args) + client.submit_order(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/orders/*}" + "%s/v1alpha/{name=projects/*/locations/*/orders/*}:submit" % client.transport._host, args[1], ) -def test_delete_order_rest_flattened_error(transport: str = "rest"): +def test_submit_order_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17344,13 +18063,13 @@ def test_delete_order_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_order( - service.DeleteOrderRequest(), + client.submit_order( + service.SubmitOrderRequest(), name="name_value", ) -def test_delete_order_rest_error(): +def test_submit_order_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -17359,39 +18078,46 @@ def test_delete_order_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.SubmitOrderRequest, + service.ListSitesRequest, dict, ], ) -def test_submit_order_rest(request_type): +def test_list_sites_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake 
a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListSitesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListSitesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.submit_order(request) + response = client.list_sites(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListSitesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_submit_order_rest_use_cached_wrapped_rpc(): +def test_list_sites_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17405,37 +18131,33 @@ def test_submit_order_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.submit_order in client._transport._wrapped_methods + assert client._transport.list_sites in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.submit_order] = mock_rpc + client._transport._wrapped_methods[client._transport.list_sites] = mock_rpc request = {} - client.submit_order(request) + client.list_sites(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.submit_order(request) + client.list_sites(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_submit_order_rest_required_fields(request_type=service.SubmitOrderRequest): +def test_list_sites_rest_required_fields(request_type=service.ListSitesRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17446,21 +18168,30 @@ def test_submit_order_rest_required_fields(request_type=service.SubmitOrderReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).submit_order._get_unset_required_fields(jsonified_request) + ).list_sites._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).submit_order._get_unset_required_fields(jsonified_request) + ).list_sites._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17469,7 +18200,7 @@ def test_submit_order_rest_required_fields(request_type=service.SubmitOrderReque request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListSitesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17481,37 +18212,49 @@ def test_submit_order_rest_required_fields(request_type=service.SubmitOrderReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListSitesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.submit_order(request) + response = client.list_sites(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_submit_order_rest_unset_required_fields(): +def test_list_sites_rest_unset_required_fields(): transport = 
transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.submit_order._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_sites._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_submit_order_rest_interceptors(null_interceptor): +def test_list_sites_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17524,15 +18267,13 @@ def test_submit_order_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_submit_order" + transports.GDCHardwareManagementRestInterceptor, "post_list_sites" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_submit_order" + transports.GDCHardwareManagementRestInterceptor, "pre_list_sites" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.SubmitOrderRequest.pb(service.SubmitOrderRequest()) + pb_message = service.ListSitesRequest.pb(service.ListSitesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -17543,19 +18284,19 @@ def test_submit_order_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = service.ListSitesResponse.to_json( + service.ListSitesResponse() ) - request = service.SubmitOrderRequest() + request = 
service.ListSitesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = service.ListSitesResponse() - client.submit_order( + client.list_sites( request, metadata=[ ("key", "val"), @@ -17567,8 +18308,8 @@ def test_submit_order_rest_interceptors(null_interceptor): post.assert_called_once() -def test_submit_order_rest_bad_request( - transport: str = "rest", request_type=service.SubmitOrderRequest +def test_list_sites_rest_bad_request( + transport: str = "rest", request_type=service.ListSitesRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17576,7 +18317,7 @@ def test_submit_order_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17588,10 +18329,10 @@ def test_submit_order_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.submit_order(request) + client.list_sites(request) -def test_submit_order_rest_flattened(): +def test_list_sites_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17600,38 +18341,39 @@ def test_submit_order_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListSitesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/orders/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListSitesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.submit_order(**mock_args) + client.list_sites(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/orders/*}:submit" - % client.transport._host, + "%s/v1alpha/{parent=projects/*/locations/*}/sites" % client.transport._host, args[1], ) -def test_submit_order_rest_flattened_error(transport: str = "rest"): +def test_list_sites_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17640,61 +18382,124 @@ def test_submit_order_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.submit_order( - service.SubmitOrderRequest(), - name="name_value", + client.list_sites( + service.ListSitesRequest(), + parent="parent_value", ) -def test_submit_order_rest_error(): +def test_list_sites_rest_pager(transport: str = "rest"): client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListSitesResponse( + sites=[ + resources.Site(), + resources.Site(), + resources.Site(), + ], + next_page_token="abc", + ), + service.ListSitesResponse( + sites=[], + next_page_token="def", + ), + service.ListSitesResponse( + sites=[ + resources.Site(), + ], + next_page_token="ghi", + ), + service.ListSitesResponse( + sites=[ + resources.Site(), + resources.Site(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListSitesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_sites(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Site) for i in results) + + pages = list(client.list_sites(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert 
page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - service.ListSitesRequest, + service.GetSiteRequest, dict, ], ) -def test_list_sites_rest(request_type): +def test_get_site_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/sites/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListSitesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = resources.Site( + name="name_value", + display_name="display_name_value", + description="description_value", + google_maps_pin_uri="google_maps_pin_uri_value", + notes="notes_value", + customer_site_id="customer_site_id_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListSitesResponse.pb(return_value) + return_value = resources.Site.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_sites(request) + response = client.get_site(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSitesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.Site) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.google_maps_pin_uri == "google_maps_pin_uri_value" + assert response.notes == "notes_value" + assert response.customer_site_id == "customer_site_id_value" -def test_list_sites_rest_use_cached_wrapped_rpc(): +def test_get_site_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17708,33 +18513,33 @@ def test_list_sites_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_sites in client._transport._wrapped_methods + assert client._transport.get_site in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_sites] = mock_rpc + client._transport._wrapped_methods[client._transport.get_site] = mock_rpc request = {} - client.list_sites(request) + client.get_site(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_sites(request) + client.get_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_sites_rest_required_fields(request_type=service.ListSitesRequest): +def test_get_site_rest_required_fields(request_type=service.GetSiteRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17745,30 +18550,21 @@ def test_list_sites_rest_required_fields(request_type=service.ListSitesRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_sites._get_unset_required_fields(jsonified_request) + ).get_site._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_sites._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).get_site._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17777,7 +18573,7 @@ def test_list_sites_rest_required_fields(request_type=service.ListSitesRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListSitesResponse() + return_value = resources.Site() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17798,40 +18594,30 @@ def test_list_sites_rest_required_fields(request_type=service.ListSitesRequest): response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListSitesResponse.pb(return_value) + return_value = resources.Site.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_sites(request) + response = client.get_site(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_sites_rest_unset_required_fields(): +def test_get_site_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_sites._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - 
"filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_site._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_sites_rest_interceptors(null_interceptor): +def test_get_site_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17844,13 +18630,13 @@ def test_list_sites_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_list_sites" + transports.GDCHardwareManagementRestInterceptor, "post_get_site" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_list_sites" + transports.GDCHardwareManagementRestInterceptor, "pre_get_site" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.ListSitesRequest.pb(service.ListSitesRequest()) + pb_message = service.GetSiteRequest.pb(service.GetSiteRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -17861,19 +18647,17 @@ def test_list_sites_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = service.ListSitesResponse.to_json( - service.ListSitesResponse() - ) + req.return_value._content = resources.Site.to_json(resources.Site()) - request = service.ListSitesRequest() + request = service.GetSiteRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = service.ListSitesResponse() + post.return_value = resources.Site() - client.list_sites( + client.get_site( request, metadata=[ ("key", "val"), @@ -17885,8 +18669,8 @@ def 
test_list_sites_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_sites_rest_bad_request( - transport: str = "rest", request_type=service.ListSitesRequest +def test_get_site_rest_bad_request( + transport: str = "rest", request_type=service.GetSiteRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17894,7 +18678,7 @@ def test_list_sites_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/sites/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17906,10 +18690,10 @@ def test_list_sites_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_sites(request) + client.get_site(request) -def test_list_sites_rest_flattened(): +def test_get_site_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17918,163 +18702,212 @@ def test_list_sites_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = service.ListSitesResponse() + return_value = resources.Site() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2/sites/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = service.ListSitesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_sites(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/sites" % client.transport._host, - args[1], - ) - - -def test_list_sites_rest_flattened_error(transport: str = "rest"): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_sites( - service.ListSitesRequest(), - parent="parent_value", + name="name_value", ) - - -def test_list_sites_rest_pager(transport: str = "rest"): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListSitesResponse( - sites=[ - resources.Site(), - resources.Site(), - resources.Site(), - ], - next_page_token="abc", - ), - service.ListSitesResponse( - sites=[], - next_page_token="def", - ), - service.ListSitesResponse( - sites=[ - resources.Site(), - ], - next_page_token="ghi", - ), - service.ListSitesResponse( - sites=[ - resources.Site(), - resources.Site(), - ], - ), + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Site.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_site(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/sites/*}" % client.transport._host, + args[1], ) - # Two responses for two calls - response = response + response - # Wrap the values into proper Response objs - response = tuple(service.ListSitesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} +def test_get_site_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - pager = client.list_sites(request=sample_request) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_site( + service.GetSiteRequest(), + name="name_value", + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Site) for i in results) - pages = list(client.list_sites(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_get_site_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - service.GetSiteRequest, + service.CreateSiteRequest, dict, ], ) -def test_get_site_rest(request_type): +def test_create_site_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/sites/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["site"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "organization_contact": { + "address": { + "revision": 879, + "region_code": "region_code_value", + "language_code": "language_code_value", + "postal_code": "postal_code_value", + "sorting_code": "sorting_code_value", + "administrative_area": "administrative_area_value", + "locality": "locality_value", + "sublocality": "sublocality_value", + "address_lines": ["address_lines_value1", "address_lines_value2"], + "recipients": ["recipients_value1", "recipients_value2"], + "organization": "organization_value", + }, + "email": "email_value", + "phone": "phone_value", + "contacts": [ + { + "given_name": "given_name_value", + "family_name": "family_name_value", + "email": "email_value", + "phone": "phone_value", + "time_zone": {"id": "id_value", 
"version": "version_value"}, + "reachable_times": [ + { + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "end_time": {}, + "days": [1], + } + ], + } + ], + }, + "google_maps_pin_uri": "google_maps_pin_uri_value", + "access_times": {}, + "notes": "notes_value", + "customer_site_id": "customer_site_id_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateSiteRequest.meta.fields["site"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["site"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # 
For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["site"][field])): + del request_init["site"][field][i][subfield] + else: + del request_init["site"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Site( - name="name_value", - display_name="display_name_value", - description="description_value", - google_maps_pin_uri="google_maps_pin_uri_value", - notes="notes_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Site.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_site(request) + response = client.create_site(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Site) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.google_maps_pin_uri == "google_maps_pin_uri_value" - assert response.notes == "notes_value" + assert response.operation.name == "operations/spam" -def test_get_site_rest_use_cached_wrapped_rpc(): +def test_create_site_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18088,33 +18921,37 @@ def test_get_site_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_site in client._transport._wrapped_methods + assert client._transport.create_site in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_site] = mock_rpc + client._transport._wrapped_methods[client._transport.create_site] = mock_rpc request = {} - client.get_site(request) + client.create_site(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_site(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_site_rest_required_fields(request_type=service.GetSiteRequest): +def test_create_site_rest_required_fields(request_type=service.CreateSiteRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18125,21 +18962,28 @@ def test_get_site_rest_required_fields(request_type=service.GetSiteRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_site._get_unset_required_fields(jsonified_request) + ).create_site._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_site._get_unset_required_fields(jsonified_request) + ).create_site._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "site_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18148,7 +18992,7 @@ def test_get_site_rest_required_fields(request_type=service.GetSiteRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Site() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18160,39 +19004,50 @@ def test_get_site_rest_required_fields(request_type=service.GetSiteRequest): pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Site.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_site(request) + response = client.create_site(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_site_rest_unset_required_fields(): +def test_create_site_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_site._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - + unset_fields = transport.create_site._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "siteId", + ) + ) + & set( + ( + "parent", + "site", + ) + ) + ) + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_site_rest_interceptors(null_interceptor): +def test_create_site_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18205,13 +19060,15 @@ def test_get_site_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_get_site" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_create_site" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_get_site" + transports.GDCHardwareManagementRestInterceptor, "pre_create_site" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.GetSiteRequest.pb(service.GetSiteRequest()) + pb_message = service.CreateSiteRequest.pb(service.CreateSiteRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18222,17 +19079,19 @@ def test_get_site_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Site.to_json(resources.Site()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = service.GetSiteRequest() + request = service.CreateSiteRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, 
metadata - post.return_value = resources.Site() + post.return_value = operations_pb2.Operation() - client.get_site( + client.create_site( request, metadata=[ ("key", "val"), @@ -18244,8 +19103,8 @@ def test_get_site_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_site_rest_bad_request( - transport: str = "rest", request_type=service.GetSiteRequest +def test_create_site_rest_bad_request( + transport: str = "rest", request_type=service.CreateSiteRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18253,7 +19112,7 @@ def test_get_site_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/sites/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18265,10 +19124,10 @@ def test_get_site_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_site(request) + client.create_site(request) -def test_get_site_rest_flattened(): +def test_create_site_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18277,39 +19136,39 @@ def test_get_site_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Site() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/sites/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + site=resources.Site(name="name_value"), + site_id="site_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Site.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_site(**mock_args) + client.create_site(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/sites/*}" % client.transport._host, + "%s/v1alpha/{parent=projects/*/locations/*}/sites" % client.transport._host, args[1], ) -def test_get_site_rest_flattened_error(transport: str = "rest"): +def test_create_site_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18318,13 +19177,15 @@ def test_get_site_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_site( - service.GetSiteRequest(), - name="name_value", + client.create_site( + service.CreateSiteRequest(), + parent="parent_value", + site=resources.Site(name="name_value"), + site_id="site_id_value", ) -def test_get_site_rest_error(): +def test_create_site_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -18333,20 +19194,22 @@ def test_get_site_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.CreateSiteRequest, + service.UpdateSiteRequest, dict, ], ) -def test_create_site_rest(request_type): +def test_update_site_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "site": {"name": "projects/sample1/locations/sample2/sites/sample3"} + } request_init["site"] = { - "name": "name_value", + "name": "projects/sample1/locations/sample2/sites/sample3", "display_name": "display_name_value", "description": "description_value", "create_time": {"seconds": 751, "nanos": 543}, @@ -18393,13 +19256,14 @@ def test_create_site_rest(request_type): "google_maps_pin_uri": "google_maps_pin_uri_value", "access_times": {}, "notes": "notes_value", + "customer_site_id": "customer_site_id_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = service.CreateSiteRequest.meta.fields["site"] + test_field = service.UpdateSiteRequest.meta.fields["site"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -18475,13 +19339,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_site(request) + response = client.update_site(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_site_rest_use_cached_wrapped_rpc(): +def test_update_site_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18495,17 +19359,17 @@ def test_create_site_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_site in client._transport._wrapped_methods + assert client._transport.update_site in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_site] = mock_rpc + client._transport._wrapped_methods[client._transport.update_site] = mock_rpc request = {} - client.create_site(request) + client.update_site(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -18514,18 +19378,17 @@ def test_create_site_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_site(request) + client.update_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_site_rest_required_fields(request_type=service.CreateSiteRequest): +def test_update_site_rest_required_fields(request_type=service.UpdateSiteRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18536,28 +19399,24 @@ def test_create_site_rest_required_fields(request_type=service.CreateSiteRequest unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_site._get_unset_required_fields(jsonified_request) + ).update_site._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_site._get_unset_required_fields(jsonified_request) + ).update_site._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( "request_id", - "site_id", + "update_mask", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18578,7 +19437,7 @@ def test_create_site_rest_required_fields(request_type=service.CreateSiteRequest pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -18591,29 +19450,29 @@ def test_create_site_rest_required_fields(request_type=service.CreateSiteRequest response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_site(request) + response = client.update_site(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_site_rest_unset_required_fields(): +def test_update_site_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_site._get_unset_required_fields({}) + unset_fields = transport.update_site._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( "requestId", - "siteId", + "updateMask", ) ) & set( ( - "parent", + "updateMask", "site", ) ) @@ -18621,7 +19480,7 @@ def test_create_site_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_site_rest_interceptors(null_interceptor): +def test_update_site_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18636,13 +19495,13 @@ def 
test_create_site_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_create_site" + transports.GDCHardwareManagementRestInterceptor, "post_update_site" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_create_site" + transports.GDCHardwareManagementRestInterceptor, "pre_update_site" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.CreateSiteRequest.pb(service.CreateSiteRequest()) + pb_message = service.UpdateSiteRequest.pb(service.UpdateSiteRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18657,7 +19516,7 @@ def test_create_site_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.CreateSiteRequest() + request = service.UpdateSiteRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -18665,7 +19524,7 @@ def test_create_site_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_site( + client.update_site( request, metadata=[ ("key", "val"), @@ -18677,8 +19536,8 @@ def test_create_site_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_site_rest_bad_request( - transport: str = "rest", request_type=service.CreateSiteRequest +def test_update_site_rest_bad_request( + transport: str = "rest", request_type=service.UpdateSiteRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18686,7 +19545,9 @@ def test_create_site_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "site": {"name": "projects/sample1/locations/sample2/sites/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method 
and fake a BadRequest error. @@ -18698,10 +19559,10 @@ def test_create_site_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_site(request) + client.update_site(request) -def test_create_site_rest_flattened(): +def test_update_site_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18713,13 +19574,14 @@ def test_create_site_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "site": {"name": "projects/sample1/locations/sample2/sites/sample3"} + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", site=resources.Site(name="name_value"), - site_id="site_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -18730,195 +19592,84 @@ def test_create_site_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_site(**mock_args) + client.update_site(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/sites" % client.transport._host, + "%s/v1alpha/{site.name=projects/*/locations/*/sites/*}" + % client.transport._host, args[1], ) -def test_create_site_rest_flattened_error(transport: str = "rest"): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_site( - service.CreateSiteRequest(), - parent="parent_value", - site=resources.Site(name="name_value"), - site_id="site_id_value", - ) - - -def test_create_site_rest_error(): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - service.UpdateSiteRequest, - dict, - ], -) -def test_update_site_rest(request_type): +def test_update_site_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "site": {"name": "projects/sample1/locations/sample2/sites/sample3"} - } - request_init["site"] = { - "name": "projects/sample1/locations/sample2/sites/sample3", - "display_name": "display_name_value", - "description": "description_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "organization_contact": { - "address": { - "revision": 879, - "region_code": "region_code_value", - "language_code": "language_code_value", - "postal_code": "postal_code_value", - "sorting_code": "sorting_code_value", - "administrative_area": "administrative_area_value", - "locality": "locality_value", - "sublocality": "sublocality_value", - "address_lines": ["address_lines_value1", "address_lines_value2"], - "recipients": ["recipients_value1", "recipients_value2"], - "organization": "organization_value", - }, - "email": "email_value", - "phone": "phone_value", - "contacts": [ - { - "given_name": "given_name_value", - "family_name": "family_name_value", - "email": "email_value", - "phone": "phone_value", - "time_zone": {"id": "id_value", "version": "version_value"}, - "reachable_times": [ - { - "start_time": { - "hours": 561, - "minutes": 773, - "seconds": 751, - "nanos": 543, - }, - "end_time": {}, - "days": [1], - } - ], 
- } - ], - }, - "google_maps_pin_uri": "google_maps_pin_uri_value", - "access_times": {}, - "notes": "notes_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateSiteRequest.meta.fields["site"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_site( + service.UpdateSiteRequest(), + site=resources.Site(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["site"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value +def test_update_site_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["site"][field])): - del request_init["site"][field][i][subfield] - else: - del request_init["site"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + service.ListHardwareGroupsRequest, + dict, + ], +) +def test_list_hardware_groups_rest(request_type): + client = GDCHardwareManagementClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListHardwareGroupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListHardwareGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_site(request) + response = client.list_hardware_groups(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListHardwareGroupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_update_site_rest_use_cached_wrapped_rpc(): +def test_list_hardware_groups_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18932,36 +19683,39 @@ def test_update_site_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_site in client._transport._wrapped_methods + assert ( + client._transport.list_hardware_groups in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_site] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_hardware_groups + ] = mock_rpc request = {} - client.update_site(request) + client.list_hardware_groups(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_site(request) + client.list_hardware_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_site_rest_required_fields(request_type=service.UpdateSiteRequest): +def test_list_hardware_groups_rest_required_fields( + request_type=service.ListHardwareGroupsRequest, +): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18972,24 +19726,30 @@ def test_update_site_rest_required_fields(request_type=service.UpdateSiteRequest unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_site._get_unset_required_fields(jsonified_request) + ).list_hardware_groups._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_site._get_unset_required_fields(jsonified_request) + ).list_hardware_groups._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "request_id", - "update_mask", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18998,7 +19758,7 @@ def test_update_site_rest_required_fields(request_type=service.UpdateSiteRequest request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListHardwareGroupsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19010,50 +19770,49 @@ def test_update_site_rest_required_fields(request_type=service.UpdateSiteRequest pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListHardwareGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_site(request) + response = client.list_hardware_groups(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_site_rest_unset_required_fields(): +def test_list_hardware_groups_rest_unset_required_fields(): transport = 
transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_site._get_unset_required_fields({}) + unset_fields = transport.list_hardware_groups._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "requestId", - "updateMask", - ) - ) - & set( - ( - "updateMask", - "site", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_site_rest_interceptors(null_interceptor): +def test_list_hardware_groups_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19066,15 +19825,15 @@ def test_update_site_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_update_site" + transports.GDCHardwareManagementRestInterceptor, "post_list_hardware_groups" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_update_site" + transports.GDCHardwareManagementRestInterceptor, "pre_list_hardware_groups" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.UpdateSiteRequest.pb(service.UpdateSiteRequest()) + pb_message = service.ListHardwareGroupsRequest.pb( + service.ListHardwareGroupsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -19085,19 +19844,19 @@ def test_update_site_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = service.ListHardwareGroupsResponse.to_json( + 
service.ListHardwareGroupsResponse() ) - request = service.UpdateSiteRequest() + request = service.ListHardwareGroupsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = service.ListHardwareGroupsResponse() - client.update_site( + client.list_hardware_groups( request, metadata=[ ("key", "val"), @@ -19109,8 +19868,8 @@ def test_update_site_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_site_rest_bad_request( - transport: str = "rest", request_type=service.UpdateSiteRequest +def test_list_hardware_groups_rest_bad_request( + transport: str = "rest", request_type=service.ListHardwareGroupsRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19118,9 +19877,7 @@ def test_update_site_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "site": {"name": "projects/sample1/locations/sample2/sites/sample3"} - } + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19132,10 +19889,10 @@ def test_update_site_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_site(request) + client.list_hardware_groups(request) -def test_update_site_rest_flattened(): +def test_list_hardware_groups_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19144,105 +19901,168 @@ def test_update_site_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListHardwareGroupsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "site": {"name": "projects/sample1/locations/sample2/sites/sample3"} - } + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} # get truthy value for each flattened field mock_args = dict( - site=resources.Site(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListHardwareGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_site(**mock_args) + client.list_hardware_groups(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{site.name=projects/*/locations/*/sites/*}" + "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/hardwareGroups" % client.transport._host, args[1], ) -def test_update_site_rest_flattened_error(transport: str = "rest"): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_site( - service.UpdateSiteRequest(), - site=resources.Site(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) +def test_list_hardware_groups_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_hardware_groups( + service.ListHardwareGroupsRequest(), + parent="parent_value", + ) + + +def test_list_hardware_groups_rest_pager(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + resources.HardwareGroup(), + resources.HardwareGroup(), + ], + next_page_token="abc", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[], + next_page_token="def", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + ], + next_page_token="ghi", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + resources.HardwareGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + service.ListHardwareGroupsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + + pager = client.list_hardware_groups(request=sample_request) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.HardwareGroup) for i in results) -def test_update_site_rest_error(): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + pages = list(client.list_hardware_groups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - service.ListHardwareGroupsRequest, + service.GetHardwareGroupRequest, dict, ], ) -def test_list_hardware_groups_rest(request_type): +def test_get_hardware_group_rest(request_type): client = GDCHardwareManagementClient( 
credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListHardwareGroupsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = resources.HardwareGroup( + name="name_value", + hardware_count=1494, + site="site_value", + state=resources.HardwareGroup.State.ADDITIONAL_INFO_NEEDED, + zone="zone_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListHardwareGroupsResponse.pb(return_value) + return_value = resources.HardwareGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_hardware_groups(request) + response = client.get_hardware_group(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListHardwareGroupsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.HardwareGroup) + assert response.name == "name_value" + assert response.hardware_count == 1494 + assert response.site == "site_value" + assert response.state == resources.HardwareGroup.State.ADDITIONAL_INFO_NEEDED + assert response.zone == "zone_value" -def test_list_hardware_groups_rest_use_cached_wrapped_rpc(): +def test_get_hardware_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19257,7 +20077,7 @@ def test_list_hardware_groups_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_hardware_groups in client._transport._wrapped_methods + client._transport.get_hardware_group in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -19266,29 +20086,29 @@ def test_list_hardware_groups_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_hardware_groups + client._transport.get_hardware_group ] = mock_rpc request = {} - client.list_hardware_groups(request) + client.get_hardware_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_hardware_groups(request) + client.get_hardware_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_hardware_groups_rest_required_fields( - request_type=service.ListHardwareGroupsRequest, +def test_get_hardware_group_rest_required_fields( + request_type=service.GetHardwareGroupRequest, ): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19299,30 +20119,21 @@ def test_list_hardware_groups_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_hardware_groups._get_unset_required_fields(jsonified_request) + ).get_hardware_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_hardware_groups._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).get_hardware_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19331,7 +20142,7 @@ def test_list_hardware_groups_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListHardwareGroupsResponse() + return_value = resources.HardwareGroup() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19352,40 +20163,30 @@ def test_list_hardware_groups_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListHardwareGroupsResponse.pb(return_value) + return_value = resources.HardwareGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_hardware_groups(request) + response = client.get_hardware_group(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_hardware_groups_rest_unset_required_fields(): +def test_get_hardware_group_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_hardware_groups._get_unset_required_fields({}) - assert 
set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_hardware_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_hardware_groups_rest_interceptors(null_interceptor): +def test_get_hardware_group_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19398,14 +20199,14 @@ def test_list_hardware_groups_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_list_hardware_groups" + transports.GDCHardwareManagementRestInterceptor, "post_get_hardware_group" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_list_hardware_groups" + transports.GDCHardwareManagementRestInterceptor, "pre_get_hardware_group" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.ListHardwareGroupsRequest.pb( - service.ListHardwareGroupsRequest() + pb_message = service.GetHardwareGroupRequest.pb( + service.GetHardwareGroupRequest() ) transcode.return_value = { "method": "post", @@ -19417,19 +20218,19 @@ def test_list_hardware_groups_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = service.ListHardwareGroupsResponse.to_json( - service.ListHardwareGroupsResponse() + req.return_value._content = resources.HardwareGroup.to_json( + resources.HardwareGroup() ) - request = service.ListHardwareGroupsRequest() + request = service.GetHardwareGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value 
= service.ListHardwareGroupsResponse() + post.return_value = resources.HardwareGroup() - client.list_hardware_groups( + client.get_hardware_group( request, metadata=[ ("key", "val"), @@ -19441,8 +20242,8 @@ def test_list_hardware_groups_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_hardware_groups_rest_bad_request( - transport: str = "rest", request_type=service.ListHardwareGroupsRequest +def test_get_hardware_group_rest_bad_request( + transport: str = "rest", request_type=service.GetHardwareGroupRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19450,7 +20251,9 @@ def test_list_hardware_groups_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19462,10 +20265,10 @@ def test_list_hardware_groups_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_hardware_groups(request) + client.get_hardware_group(request) -def test_list_hardware_groups_rest_flattened(): +def test_get_hardware_group_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19474,14 +20277,16 @@ def test_list_hardware_groups_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = service.ListHardwareGroupsResponse() + return_value = resources.HardwareGroup() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + sample_request = { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -19489,25 +20294,25 @@ def test_list_hardware_groups_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListHardwareGroupsResponse.pb(return_value) + return_value = resources.HardwareGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_hardware_groups(**mock_args) + client.get_hardware_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/hardwareGroups" + "%s/v1alpha/{name=projects/*/locations/*/orders/*/hardwareGroups/*}" % client.transport._host, args[1], ) -def test_list_hardware_groups_rest_flattened_error(transport: str = "rest"): +def test_get_hardware_group_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19516,126 +20321,137 @@ def test_list_hardware_groups_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_hardware_groups( - service.ListHardwareGroupsRequest(), - parent="parent_value", + client.get_hardware_group( + service.GetHardwareGroupRequest(), + name="name_value", ) -def test_list_hardware_groups_rest_pager(transport: str = "rest"): +def test_get_hardware_group_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateHardwareGroupRequest, + dict, + ], +) +def test_create_hardware_group_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListHardwareGroupsResponse( - hardware_groups=[ - resources.HardwareGroup(), - resources.HardwareGroup(), - resources.HardwareGroup(), - ], - next_page_token="abc", - ), - service.ListHardwareGroupsResponse( - hardware_groups=[], - next_page_token="def", - ), - service.ListHardwareGroupsResponse( - hardware_groups=[ - resources.HardwareGroup(), - ], - next_page_token="ghi", - ), - service.ListHardwareGroupsResponse( - hardware_groups=[ - resources.HardwareGroup(), - resources.HardwareGroup(), - ], - ), - ) - # Two responses for two calls - response = response + response + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init["hardware_group"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "hardware_count": 1494, + "config": { + "sku": "sku_value", + "power_supply": 
1, + "subscription_duration_months": 3042, + }, + "site": "site_value", + "state": 1, + "zone": "zone_value", + "requested_installation_date": {"year": 433, "month": 550, "day": 318}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Wrap the values into proper Response objs - response = tuple( - service.ListHardwareGroupsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateHardwareGroupRequest.meta.fields["hardware_group"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] - sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - pager = client.list_hardware_groups(request=sample_request) + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.HardwareGroup) for i in results) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - pages = list(client.list_hardware_groups(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["hardware_group"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - service.GetHardwareGroupRequest, - dict, - ], -) -def test_get_hardware_group_rest(request_type): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + 
subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" - } + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["hardware_group"][field])): + del request_init["hardware_group"][field][i][subfield] + else: + del request_init["hardware_group"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.HardwareGroup( - name="name_value", - hardware_count=1494, - site="site_value", - state=resources.HardwareGroup.State.ADDITIONAL_INFO_NEEDED, - zone="zone_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.HardwareGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_hardware_group(request) + response = client.create_hardware_group(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.HardwareGroup) - assert response.name == "name_value" - assert response.hardware_count == 1494 - assert response.site == "site_value" - assert response.state == resources.HardwareGroup.State.ADDITIONAL_INFO_NEEDED - assert response.zone == "zone_value" + assert response.operation.name == "operations/spam" -def test_get_hardware_group_rest_use_cached_wrapped_rpc(): +def test_create_hardware_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19650,7 +20466,8 @@ def test_get_hardware_group_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_hardware_group in client._transport._wrapped_methods + client._transport.create_hardware_group + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -19659,29 +20476,33 @@ def test_get_hardware_group_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_hardware_group + client._transport.create_hardware_group ] = mock_rpc request = {} - client.get_hardware_group(request) + client.create_hardware_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_hardware_group(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_hardware_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_hardware_group_rest_required_fields( - request_type=service.GetHardwareGroupRequest, +def test_create_hardware_group_rest_required_fields( + request_type=service.CreateHardwareGroupRequest, ): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19692,21 +20513,28 @@ def test_get_hardware_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_hardware_group._get_unset_required_fields(jsonified_request) + ).create_hardware_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_hardware_group._get_unset_required_fields(jsonified_request) + ).create_hardware_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "hardware_group_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19715,7 +20543,7 @@ def test_get_hardware_group_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.HardwareGroup() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19727,39 +20555,50 @@ def test_get_hardware_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.HardwareGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_hardware_group(request) + response = client.create_hardware_group(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_hardware_group_rest_unset_required_fields(): +def test_create_hardware_group_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_hardware_group._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_hardware_group._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "hardwareGroupId", + "requestId", + ) + ) + & set( + ( + "parent", + "hardwareGroup", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_hardware_group_rest_interceptors(null_interceptor): +def test_create_hardware_group_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19772,14 +20611,16 @@ def test_get_hardware_group_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_get_hardware_group" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_create_hardware_group" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_get_hardware_group" + transports.GDCHardwareManagementRestInterceptor, "pre_create_hardware_group" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.GetHardwareGroupRequest.pb( - service.GetHardwareGroupRequest() + pb_message = service.CreateHardwareGroupRequest.pb( + service.CreateHardwareGroupRequest() ) transcode.return_value = { "method": "post", @@ -19791,19 +20632,19 @@ def test_get_hardware_group_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.HardwareGroup.to_json( - resources.HardwareGroup() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - 
request = service.GetHardwareGroupRequest() + request = service.CreateHardwareGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.HardwareGroup() + post.return_value = operations_pb2.Operation() - client.get_hardware_group( + client.create_hardware_group( request, metadata=[ ("key", "val"), @@ -19815,8 +20656,8 @@ def test_get_hardware_group_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_hardware_group_rest_bad_request( - transport: str = "rest", request_type=service.GetHardwareGroupRequest +def test_create_hardware_group_rest_bad_request( + transport: str = "rest", request_type=service.CreateHardwareGroupRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19824,9 +20665,7 @@ def test_get_hardware_group_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19838,10 +20677,10 @@ def test_get_hardware_group_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_hardware_group(request) + client.create_hardware_group(request) -def test_get_hardware_group_rest_flattened(): +def test_create_hardware_group_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19850,42 +20689,40 @@ def test_get_hardware_group_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.HardwareGroup() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" - } + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + hardware_group=resources.HardwareGroup(name="name_value"), + hardware_group_id="hardware_group_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.HardwareGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_hardware_group(**mock_args) + client.create_hardware_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/orders/*/hardwareGroups/*}" + "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/hardwareGroups" % client.transport._host, args[1], ) -def test_get_hardware_group_rest_flattened_error(transport: str = "rest"): +def test_create_hardware_group_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19894,13 +20731,15 @@ def test_get_hardware_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_hardware_group( - service.GetHardwareGroupRequest(), - name="name_value", + client.create_hardware_group( + service.CreateHardwareGroupRequest(), + parent="parent_value", + hardware_group=resources.HardwareGroup(name="name_value"), + hardware_group_id="hardware_group_id_value", ) -def test_get_hardware_group_rest_error(): +def test_create_hardware_group_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -19909,20 +20748,24 @@ def test_get_hardware_group_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.CreateHardwareGroupRequest, + service.UpdateHardwareGroupRequest, dict, ], ) -def test_create_hardware_group_rest(request_type): +def test_update_hardware_group_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init = { + "hardware_group": { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" + } + } request_init["hardware_group"] = { - "name": "name_value", + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, "labels": {}, @@ -19942,7 +20785,7 @@ def test_create_hardware_group_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = service.CreateHardwareGroupRequest.meta.fields["hardware_group"] + test_field = service.UpdateHardwareGroupRequest.meta.fields["hardware_group"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -20018,13 +20861,13 @@ def get_message_fields(field): response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_hardware_group(request) + response = client.update_hardware_group(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_hardware_group_rest_use_cached_wrapped_rpc(): +def test_update_hardware_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20039,7 +20882,7 @@ def test_create_hardware_group_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_hardware_group + client._transport.update_hardware_group in client._transport._wrapped_methods ) @@ -20049,11 +20892,11 @@ def test_create_hardware_group_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_hardware_group + client._transport.update_hardware_group ] = mock_rpc request = {} - client.create_hardware_group(request) + client.update_hardware_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -20062,20 +20905,19 @@ def test_create_hardware_group_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_hardware_group(request) + client.update_hardware_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_hardware_group_rest_required_fields( - request_type=service.CreateHardwareGroupRequest, +def test_update_hardware_group_rest_required_fields( + request_type=service.UpdateHardwareGroupRequest, ): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20086,28 +20928,24 @@ def test_create_hardware_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_hardware_group._get_unset_required_fields(jsonified_request) + ).update_hardware_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_hardware_group._get_unset_required_fields(jsonified_request) + ).update_hardware_group._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "hardware_group_id", "request_id", + "update_mask", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20128,7 +20966,7 @@ def test_create_hardware_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -20141,29 +20979,29 @@ def test_create_hardware_group_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_hardware_group(request) + response = client.update_hardware_group(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_hardware_group_rest_unset_required_fields(): +def test_update_hardware_group_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_hardware_group._get_unset_required_fields({}) + unset_fields = transport.update_hardware_group._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "hardwareGroupId", "requestId", + "updateMask", ) ) & set( ( - "parent", + "updateMask", "hardwareGroup", ) ) @@ -20171,7 +21009,7 @@ def test_create_hardware_group_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_hardware_group_rest_interceptors(null_interceptor): +def test_update_hardware_group_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20186,14 +21024,14 @@ def test_create_hardware_group_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_create_hardware_group" + transports.GDCHardwareManagementRestInterceptor, "post_update_hardware_group" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_create_hardware_group" + transports.GDCHardwareManagementRestInterceptor, "pre_update_hardware_group" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.CreateHardwareGroupRequest.pb( - service.CreateHardwareGroupRequest() + pb_message = service.UpdateHardwareGroupRequest.pb( + service.UpdateHardwareGroupRequest() ) transcode.return_value = { "method": "post", @@ -20209,7 +21047,7 @@ def test_create_hardware_group_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.CreateHardwareGroupRequest() + request = service.UpdateHardwareGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -20217,7 +21055,7 @@ def test_create_hardware_group_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_hardware_group( + client.update_hardware_group( request, metadata=[ ("key", "val"), @@ -20229,8 +21067,8 @@ def test_create_hardware_group_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_hardware_group_rest_bad_request( - transport: str = "rest", request_type=service.CreateHardwareGroupRequest +def test_update_hardware_group_rest_bad_request( + transport: str = "rest", request_type=service.UpdateHardwareGroupRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20238,7 +21076,11 @@ def test_create_hardware_group_rest_bad_request( ) # 
send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init = { + "hardware_group": { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20250,10 +21092,10 @@ def test_create_hardware_group_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_hardware_group(request) + client.update_hardware_group(request) -def test_create_hardware_group_rest_flattened(): +def test_update_hardware_group_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20265,13 +21107,16 @@ def test_create_hardware_group_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + sample_request = { + "hardware_group": { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", hardware_group=resources.HardwareGroup(name="name_value"), - hardware_group_id="hardware_group_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -20282,20 +21127,20 @@ def test_create_hardware_group_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_hardware_group(**mock_args) + client.update_hardware_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/hardwareGroups" + "%s/v1alpha/{hardware_group.name=projects/*/locations/*/orders/*/hardwareGroups/*}" % client.transport._host, args[1], ) -def test_create_hardware_group_rest_flattened_error(transport: str = "rest"): +def test_update_hardware_group_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20304,15 +21149,14 @@ def test_create_hardware_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_hardware_group( - service.CreateHardwareGroupRequest(), - parent="parent_value", + client.update_hardware_group( + service.UpdateHardwareGroupRequest(), hardware_group=resources.HardwareGroup(name="name_value"), - hardware_group_id="hardware_group_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_hardware_group_rest_error(): +def test_update_hardware_group_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20321,11 +21165,11 @@ def test_create_hardware_group_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.UpdateHardwareGroupRequest, + service.DeleteHardwareGroupRequest, dict, ], ) -def test_update_hardware_group_rest(request_type): +def test_delete_hardware_group_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20333,93 +21177,8 @@ def test_update_hardware_group_rest(request_type): # send a request that will satisfy transcoding request_init = { - "hardware_group": { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" - } - } - 
request_init["hardware_group"] = { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "hardware_count": 1494, - "config": { - "sku": "sku_value", - "power_supply": 1, - "subscription_duration_months": 3042, - }, - "site": "site_value", - "state": 1, - "zone": "zone_value", - "requested_installation_date": {"year": 433, "month": 550, "day": 318}, + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateHardwareGroupRequest.meta.fields["hardware_group"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["hardware_group"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["hardware_group"][field])): - del request_init["hardware_group"][field][i][subfield] - else: - 
del request_init["hardware_group"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -20434,13 +21193,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_hardware_group(request) + response = client.delete_hardware_group(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_update_hardware_group_rest_use_cached_wrapped_rpc(): +def test_delete_hardware_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20455,7 +21214,7 @@ def test_update_hardware_group_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_hardware_group + client._transport.delete_hardware_group in client._transport._wrapped_methods ) @@ -20465,11 +21224,11 @@ def test_update_hardware_group_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_hardware_group + client._transport.delete_hardware_group ] = mock_rpc request = {} - client.update_hardware_group(request) + client.delete_hardware_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -20478,19 +21237,20 @@ def test_update_hardware_group_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_hardware_group(request) + client.delete_hardware_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_hardware_group_rest_required_fields( - request_type=service.UpdateHardwareGroupRequest, +def test_delete_hardware_group_rest_required_fields( + request_type=service.DeleteHardwareGroupRequest, ): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20501,24 +21261,23 @@ def test_update_hardware_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_hardware_group._get_unset_required_fields(jsonified_request) + ).delete_hardware_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_hardware_group._get_unset_required_fields(jsonified_request) + ).delete_hardware_group._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "request_id", - "update_mask", - ) - ) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20539,10 +21298,9 @@ def test_update_hardware_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -20552,37 +21310,24 @@ def test_update_hardware_group_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_hardware_group(request) + response = client.delete_hardware_group(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_hardware_group_rest_unset_required_fields(): +def test_delete_hardware_group_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_hardware_group._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "requestId", - "updateMask", - ) - ) - & set( - ( - "updateMask", - "hardwareGroup", - ) - ) - ) + unset_fields = transport.delete_hardware_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_hardware_group_rest_interceptors(null_interceptor): +def test_delete_hardware_group_rest_interceptors(null_interceptor): transport = 
transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20597,14 +21342,14 @@ def test_update_hardware_group_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_update_hardware_group" + transports.GDCHardwareManagementRestInterceptor, "post_delete_hardware_group" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_update_hardware_group" + transports.GDCHardwareManagementRestInterceptor, "pre_delete_hardware_group" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.UpdateHardwareGroupRequest.pb( - service.UpdateHardwareGroupRequest() + pb_message = service.DeleteHardwareGroupRequest.pb( + service.DeleteHardwareGroupRequest() ) transcode.return_value = { "method": "post", @@ -20620,7 +21365,7 @@ def test_update_hardware_group_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.UpdateHardwareGroupRequest() + request = service.DeleteHardwareGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -20628,7 +21373,7 @@ def test_update_hardware_group_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_hardware_group( + client.delete_hardware_group( request, metadata=[ ("key", "val"), @@ -20640,8 +21385,8 @@ def test_update_hardware_group_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_hardware_group_rest_bad_request( - transport: str = "rest", request_type=service.UpdateHardwareGroupRequest +def test_delete_hardware_group_rest_bad_request( + transport: str = "rest", request_type=service.DeleteHardwareGroupRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20650,9 +21395,7 @@ def 
test_update_hardware_group_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "hardware_group": { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" - } + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" } request = request_type(**request_init) @@ -20665,10 +21408,10 @@ def test_update_hardware_group_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_hardware_group(request) + client.delete_hardware_group(request) -def test_update_hardware_group_rest_flattened(): +def test_delete_hardware_group_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20681,15 +21424,12 @@ def test_update_hardware_group_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "hardware_group": { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" - } + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" } # get truthy value for each flattened field mock_args = dict( - hardware_group=resources.HardwareGroup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -20700,20 +21440,20 @@ def test_update_hardware_group_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_hardware_group(**mock_args) + client.delete_hardware_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{hardware_group.name=projects/*/locations/*/orders/*/hardwareGroups/*}" + "%s/v1alpha/{name=projects/*/locations/*/orders/*/hardwareGroups/*}" % client.transport._host, args[1], ) -def test_update_hardware_group_rest_flattened_error(transport: str = "rest"): +def test_delete_hardware_group_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20722,14 +21462,13 @@ def test_update_hardware_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_hardware_group( - service.UpdateHardwareGroupRequest(), - hardware_group=resources.HardwareGroup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_hardware_group( + service.DeleteHardwareGroupRequest(), + name="name_value", ) -def test_update_hardware_group_rest_error(): +def test_delete_hardware_group_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20738,41 +21477,46 @@ def test_update_hardware_group_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.DeleteHardwareGroupRequest, + service.ListHardwareRequest, dict, ], ) -def test_delete_hardware_group_rest(request_type): +def test_list_hardware_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a 
response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListHardwareResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListHardwareResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_hardware_group(request) + response = client.list_hardware(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListHardwarePager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_delete_hardware_group_rest_use_cached_wrapped_rpc(): +def test_list_hardware_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20786,44 +21530,33 @@ def test_delete_hardware_group_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_hardware_group - in client._transport._wrapped_methods - ) + assert client._transport.list_hardware in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_hardware_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_hardware] = mock_rpc request = {} - client.delete_hardware_group(request) + client.list_hardware(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_hardware_group(request) + client.list_hardware(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_hardware_group_rest_required_fields( - request_type=service.DeleteHardwareGroupRequest, -): +def test_list_hardware_rest_required_fields(request_type=service.ListHardwareRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20834,23 +21567,30 @@ def test_delete_hardware_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_hardware_group._get_unset_required_fields(jsonified_request) + ).list_hardware._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_hardware_group._get_unset_required_fields(jsonified_request) + ).list_hardware._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20859,7 +21599,7 @@ def test_delete_hardware_group_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListHardwareResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20871,36 +21611,49 @@ def test_delete_hardware_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListHardwareResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_hardware_group(request) + response = client.list_hardware(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_hardware_group_rest_unset_required_fields(): +def test_list_hardware_rest_unset_required_fields(): transport = 
transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_hardware_group._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.list_hardware._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_hardware_group_rest_interceptors(null_interceptor): +def test_list_hardware_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20913,17 +21666,13 @@ def test_delete_hardware_group_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_delete_hardware_group" + transports.GDCHardwareManagementRestInterceptor, "post_list_hardware" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_delete_hardware_group" + transports.GDCHardwareManagementRestInterceptor, "pre_list_hardware" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.DeleteHardwareGroupRequest.pb( - service.DeleteHardwareGroupRequest() - ) + pb_message = service.ListHardwareRequest.pb(service.ListHardwareRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20934,19 +21683,19 @@ def test_delete_hardware_group_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = 
service.ListHardwareResponse.to_json( + service.ListHardwareResponse() ) - request = service.DeleteHardwareGroupRequest() + request = service.ListHardwareRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = service.ListHardwareResponse() - client.delete_hardware_group( + client.list_hardware( request, metadata=[ ("key", "val"), @@ -20958,8 +21707,8 @@ def test_delete_hardware_group_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_hardware_group_rest_bad_request( - transport: str = "rest", request_type=service.DeleteHardwareGroupRequest +def test_list_hardware_rest_bad_request( + transport: str = "rest", request_type=service.ListHardwareRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20967,9 +21716,7 @@ def test_delete_hardware_group_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20981,10 +21728,10 @@ def test_delete_hardware_group_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_hardware_group(request) + client.list_hardware(request) -def test_delete_hardware_group_rest_flattened(): +def test_list_hardware_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20993,40 +21740,40 @@ def test_delete_hardware_group_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListHardwareResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListHardwareResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_hardware_group(**mock_args) + client.list_hardware(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/orders/*/hardwareGroups/*}" + "%s/v1alpha/{parent=projects/*/locations/*}/hardware" % client.transport._host, args[1], ) -def test_delete_hardware_group_rest_flattened_error(transport: str = "rest"): +def test_list_hardware_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21035,61 +21782,128 @@ def test_delete_hardware_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_hardware_group( - service.DeleteHardwareGroupRequest(), - name="name_value", + client.list_hardware( + service.ListHardwareRequest(), + parent="parent_value", ) -def test_delete_hardware_group_rest_error(): +def test_list_hardware_rest_pager(transport: str = "rest"): client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + resources.Hardware(), + resources.Hardware(), + ], + next_page_token="abc", + ), + service.ListHardwareResponse( + hardware=[], + next_page_token="def", + ), + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + ], + next_page_token="ghi", + ), + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + resources.Hardware(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListHardwareResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_hardware(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Hardware) for i in results) + + pages = 
list(client.list_hardware(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - service.ListHardwareRequest, + service.GetHardwareRequest, dict, ], ) -def test_list_hardware_rest(request_type): +def test_get_hardware_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListHardwareResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = resources.Hardware( + name="name_value", + display_name="display_name_value", + order="order_value", + hardware_group="hardware_group_value", + site="site_value", + state=resources.Hardware.State.ADDITIONAL_INFO_NEEDED, + ciq_uri="ciq_uri_value", + zone="zone_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListHardwareResponse.pb(return_value) + return_value = resources.Hardware.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_hardware(request) + response = client.get_hardware(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListHardwarePager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.Hardware) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.order == "order_value" + assert response.hardware_group == "hardware_group_value" + assert response.site == "site_value" + assert response.state == resources.Hardware.State.ADDITIONAL_INFO_NEEDED + assert response.ciq_uri == "ciq_uri_value" + assert response.zone == "zone_value" -def test_list_hardware_rest_use_cached_wrapped_rpc(): +def test_get_hardware_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21103,33 +21917,33 @@ def test_list_hardware_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_hardware in client._transport._wrapped_methods + assert client._transport.get_hardware in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_hardware] = mock_rpc + client._transport._wrapped_methods[client._transport.get_hardware] = mock_rpc request = {} - client.list_hardware(request) + client.get_hardware(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_hardware(request) + client.get_hardware(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_hardware_rest_required_fields(request_type=service.ListHardwareRequest): +def test_get_hardware_rest_required_fields(request_type=service.GetHardwareRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21140,30 +21954,21 @@ def test_list_hardware_rest_required_fields(request_type=service.ListHardwareReq unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_hardware._get_unset_required_fields(jsonified_request) + ).get_hardware._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_hardware._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).get_hardware._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21172,7 +21977,7 @@ def test_list_hardware_rest_required_fields(request_type=service.ListHardwareReq request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListHardwareResponse() + return_value = resources.Hardware() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21193,40 +21998,30 @@ def test_list_hardware_rest_required_fields(request_type=service.ListHardwareReq response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListHardwareResponse.pb(return_value) + return_value = resources.Hardware.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_hardware(request) + response = client.get_hardware(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_hardware_rest_unset_required_fields(): +def test_get_hardware_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_hardware._get_unset_required_fields({}) - assert 
set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_hardware._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_hardware_rest_interceptors(null_interceptor): +def test_get_hardware_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21239,13 +22034,13 @@ def test_list_hardware_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_list_hardware" + transports.GDCHardwareManagementRestInterceptor, "post_get_hardware" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_list_hardware" + transports.GDCHardwareManagementRestInterceptor, "pre_get_hardware" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.ListHardwareRequest.pb(service.ListHardwareRequest()) + pb_message = service.GetHardwareRequest.pb(service.GetHardwareRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21256,19 +22051,17 @@ def test_list_hardware_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = service.ListHardwareResponse.to_json( - service.ListHardwareResponse() - ) + req.return_value._content = resources.Hardware.to_json(resources.Hardware()) - request = service.ListHardwareRequest() + request = service.GetHardwareRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = service.ListHardwareResponse() + post.return_value = resources.Hardware() - client.list_hardware( + 
client.get_hardware( request, metadata=[ ("key", "val"), @@ -21280,8 +22073,8 @@ def test_list_hardware_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_hardware_rest_bad_request( - transport: str = "rest", request_type=service.ListHardwareRequest +def test_get_hardware_rest_bad_request( + transport: str = "rest", request_type=service.GetHardwareRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21289,7 +22082,7 @@ def test_list_hardware_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21301,10 +22094,10 @@ def test_list_hardware_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_hardware(request) + client.get_hardware(request) -def test_list_hardware_rest_flattened(): +def test_get_hardware_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21313,14 +22106,14 @@ def test_list_hardware_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = service.ListHardwareResponse() + return_value = resources.Hardware() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2/hardware/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -21328,25 +22121,25 @@ def test_list_hardware_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListHardwareResponse.pb(return_value) + return_value = resources.Hardware.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_hardware(**mock_args) + client.get_hardware(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/hardware" + "%s/v1alpha/{name=projects/*/locations/*/hardware/*}" % client.transport._host, args[1], ) -def test_list_hardware_rest_flattened_error(transport: str = "rest"): +def test_get_hardware_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21355,128 +22148,160 @@ def test_list_hardware_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_hardware( - service.ListHardwareRequest(), - parent="parent_value", + client.get_hardware( + service.GetHardwareRequest(), + name="name_value", ) -def test_list_hardware_rest_pager(transport: str = "rest"): +def test_get_hardware_rest_error(): client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListHardwareResponse( - hardware=[ - resources.Hardware(), - resources.Hardware(), - resources.Hardware(), - ], - next_page_token="abc", - ), - service.ListHardwareResponse( - hardware=[], - next_page_token="def", - ), - service.ListHardwareResponse( - hardware=[ - resources.Hardware(), - ], - next_page_token="ghi", - ), - service.ListHardwareResponse( - hardware=[ - resources.Hardware(), - resources.Hardware(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListHardwareResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_hardware(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Hardware) for i in results) - - pages = list(client.list_hardware(request=sample_request).pages) - for page_, token 
in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - service.GetHardwareRequest, + service.CreateHardwareRequest, dict, ], ) -def test_get_hardware_rest(request_type): +def test_create_hardware_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["hardware"] = { + "name": "name_value", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "order": "order_value", + "hardware_group": "hardware_group_value", + "site": "site_value", + "state": 1, + "ciq_uri": "ciq_uri_value", + "config": { + "sku": "sku_value", + "power_supply": 1, + "subscription_duration_months": 3042, + }, + "estimated_installation_date": {"year": 433, "month": 550, "day": 318}, + "physical_info": { + "power_receptacle": 1, + "network_uplink": 1, + "voltage": 1, + "amperes": 1, + }, + "installation_info": { + "rack_location": "rack_location_value", + "power_distance_meters": 2246, + "switch_distance_meters": 2347, + "rack_unit_dimensions": { + "width_inches": 0.1273, + "height_inches": 0.13620000000000002, + "depth_inches": 0.1262, + }, + "rack_space": {"start_rack_unit": 1613, "end_rack_unit": 1366}, + "rack_type": 1, + }, + "zone": "zone_value", + "requested_installation_date": {}, + "actual_installation_date": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateHardwareRequest.meta.fields["hardware"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["hardware"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add 
`# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["hardware"][field])): + del request_init["hardware"][field][i][subfield] + else: + del request_init["hardware"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Hardware( - name="name_value", - display_name="display_name_value", - order="order_value", - hardware_group="hardware_group_value", - site="site_value", - state=resources.Hardware.State.ADDITIONAL_INFO_NEEDED, - ciq_uri="ciq_uri_value", - zone="zone_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Hardware.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_hardware(request) + response = client.create_hardware(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Hardware) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.order == "order_value" - assert response.hardware_group == "hardware_group_value" - assert response.site == "site_value" - assert response.state == resources.Hardware.State.ADDITIONAL_INFO_NEEDED - assert response.ciq_uri == "ciq_uri_value" - assert response.zone == "zone_value" + assert response.operation.name == "operations/spam" -def test_get_hardware_rest_use_cached_wrapped_rpc(): +def test_create_hardware_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21490,33 +22315,39 @@ def test_get_hardware_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_hardware in client._transport._wrapped_methods + assert client._transport.create_hardware in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_hardware] = mock_rpc + client._transport._wrapped_methods[client._transport.create_hardware] = mock_rpc request = {} - client.get_hardware(request) + client.create_hardware(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_hardware(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_hardware(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_hardware_rest_required_fields(request_type=service.GetHardwareRequest): +def test_create_hardware_rest_required_fields( + request_type=service.CreateHardwareRequest, +): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21527,21 +22358,23 @@ def test_get_hardware_rest_required_fields(request_type=service.GetHardwareReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_hardware._get_unset_required_fields(jsonified_request) + ).create_hardware._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_hardware._get_unset_required_fields(jsonified_request) + ).create_hardware._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("hardware_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21550,7 +22383,7 @@ def test_get_hardware_rest_required_fields(request_type=service.GetHardwareReque request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Hardware() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21562,39 +22395,45 @@ def test_get_hardware_rest_required_fields(request_type=service.GetHardwareReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Hardware.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_hardware(request) + response = client.create_hardware(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_hardware_rest_unset_required_fields(): +def test_create_hardware_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_hardware._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_hardware._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("hardwareId",)) + & set( + ( + "parent", + "hardware", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_hardware_rest_interceptors(null_interceptor): +def test_create_hardware_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21607,13 +22446,15 @@ def test_get_hardware_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_get_hardware" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_create_hardware" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_get_hardware" + transports.GDCHardwareManagementRestInterceptor, "pre_create_hardware" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.GetHardwareRequest.pb(service.GetHardwareRequest()) + pb_message = service.CreateHardwareRequest.pb(service.CreateHardwareRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21624,17 +22465,19 @@ def test_get_hardware_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Hardware.to_json(resources.Hardware()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = service.GetHardwareRequest() + request = service.CreateHardwareRequest() metadata = [ ("key", "val"), 
("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Hardware() + post.return_value = operations_pb2.Operation() - client.get_hardware( + client.create_hardware( request, metadata=[ ("key", "val"), @@ -21646,8 +22489,8 @@ def test_get_hardware_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_hardware_rest_bad_request( - transport: str = "rest", request_type=service.GetHardwareRequest +def test_create_hardware_rest_bad_request( + transport: str = "rest", request_type=service.CreateHardwareRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21655,7 +22498,7 @@ def test_get_hardware_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21667,10 +22510,10 @@ def test_get_hardware_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_hardware(request) + client.create_hardware(request) -def test_get_hardware_rest_flattened(): +def test_create_hardware_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21679,40 +22522,40 @@ def test_get_hardware_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Hardware() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/hardware/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + hardware=resources.Hardware(name="name_value"), + hardware_id="hardware_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Hardware.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_hardware(**mock_args) + client.create_hardware(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/hardware/*}" + "%s/v1alpha/{parent=projects/*/locations/*}/hardware" % client.transport._host, args[1], ) -def test_get_hardware_rest_flattened_error(transport: str = "rest"): +def test_create_hardware_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21721,13 +22564,15 @@ def test_get_hardware_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_hardware( - service.GetHardwareRequest(), - name="name_value", + client.create_hardware( + service.CreateHardwareRequest(), + parent="parent_value", + hardware=resources.Hardware(name="name_value"), + hardware_id="hardware_id_value", ) -def test_get_hardware_rest_error(): +def test_create_hardware_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21736,20 +22581,22 @@ def test_get_hardware_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.CreateHardwareRequest, + service.UpdateHardwareRequest, dict, ], ) -def test_create_hardware_rest(request_type): +def test_update_hardware_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "hardware": {"name": "projects/sample1/locations/sample2/hardware/sample3"} + } request_init["hardware"] = { - "name": "name_value", + "name": "projects/sample1/locations/sample2/hardware/sample3", "display_name": "display_name_value", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, @@ -21792,7 +22639,7 @@ def test_create_hardware_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = service.CreateHardwareRequest.meta.fields["hardware"] + test_field = service.UpdateHardwareRequest.meta.fields["hardware"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -21868,13 +22715,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_hardware(request) + response = client.update_hardware(request) # Establish that the response is 
the type that we expect. assert response.operation.name == "operations/spam" -def test_create_hardware_rest_use_cached_wrapped_rpc(): +def test_update_hardware_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21888,17 +22735,17 @@ def test_create_hardware_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_hardware in client._transport._wrapped_methods + assert client._transport.update_hardware in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_hardware] = mock_rpc + client._transport._wrapped_methods[client._transport.update_hardware] = mock_rpc request = {} - client.create_hardware(request) + client.update_hardware(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -21907,20 +22754,19 @@ def test_create_hardware_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_hardware(request) + client.update_hardware(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_hardware_rest_required_fields( - request_type=service.CreateHardwareRequest, +def test_update_hardware_rest_required_fields( + request_type=service.UpdateHardwareRequest, ): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21931,23 +22777,24 @@ def test_create_hardware_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_hardware._get_unset_required_fields(jsonified_request) + ).update_hardware._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_hardware._get_unset_required_fields(jsonified_request) + ).update_hardware._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("hardware_id",)) + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21968,7 +22815,7 @@ def test_create_hardware_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -21981,24 +22828,29 @@ def test_create_hardware_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_hardware(request) + response = client.update_hardware(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_hardware_rest_unset_required_fields(): +def test_update_hardware_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_hardware._get_unset_required_fields({}) + unset_fields = transport.update_hardware._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("hardwareId",)) + set( + ( + "requestId", + "updateMask", + ) + ) & set( ( - "parent", + "updateMask", "hardware", ) ) @@ -22006,7 +22858,7 @@ def test_create_hardware_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_hardware_rest_interceptors(null_interceptor): +def test_update_hardware_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), 
interceptor=None @@ -22021,13 +22873,13 @@ def test_create_hardware_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_create_hardware" + transports.GDCHardwareManagementRestInterceptor, "post_update_hardware" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_create_hardware" + transports.GDCHardwareManagementRestInterceptor, "pre_update_hardware" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.CreateHardwareRequest.pb(service.CreateHardwareRequest()) + pb_message = service.UpdateHardwareRequest.pb(service.UpdateHardwareRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22042,7 +22894,7 @@ def test_create_hardware_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.CreateHardwareRequest() + request = service.UpdateHardwareRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22050,7 +22902,7 @@ def test_create_hardware_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_hardware( + client.update_hardware( request, metadata=[ ("key", "val"), @@ -22062,8 +22914,8 @@ def test_create_hardware_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_hardware_rest_bad_request( - transport: str = "rest", request_type=service.CreateHardwareRequest +def test_update_hardware_rest_bad_request( + transport: str = "rest", request_type=service.UpdateHardwareRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22071,7 +22923,9 @@ def test_create_hardware_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "hardware": {"name": 
"projects/sample1/locations/sample2/hardware/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22083,10 +22937,10 @@ def test_create_hardware_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_hardware(request) + client.update_hardware(request) -def test_create_hardware_rest_flattened(): +def test_update_hardware_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22098,13 +22952,14 @@ def test_create_hardware_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "hardware": {"name": "projects/sample1/locations/sample2/hardware/sample3"} + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", hardware=resources.Hardware(name="name_value"), - hardware_id="hardware_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -22115,37 +22970,36 @@ def test_create_hardware_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_hardware(**mock_args) + client.update_hardware(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/hardware" + "%s/v1alpha/{hardware.name=projects/*/locations/*/hardware/*}" % client.transport._host, args[1], ) -def test_create_hardware_rest_flattened_error(transport: str = "rest"): +def test_update_hardware_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_hardware( - service.CreateHardwareRequest(), - parent="parent_value", + # fields is an error. + with pytest.raises(ValueError): + client.update_hardware( + service.UpdateHardwareRequest(), hardware=resources.Hardware(name="name_value"), - hardware_id="hardware_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_hardware_rest_error(): +def test_update_hardware_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22154,126 +23008,18 @@ def test_create_hardware_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.UpdateHardwareRequest, + service.DeleteHardwareRequest, dict, ], ) -def test_update_hardware_rest(request_type): +def test_delete_hardware_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "hardware": {"name": "projects/sample1/locations/sample2/hardware/sample3"} - } - request_init["hardware"] = { - "name": "projects/sample1/locations/sample2/hardware/sample3", - "display_name": "display_name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "order": "order_value", - "hardware_group": 
"hardware_group_value", - "site": "site_value", - "state": 1, - "ciq_uri": "ciq_uri_value", - "config": { - "sku": "sku_value", - "power_supply": 1, - "subscription_duration_months": 3042, - }, - "estimated_installation_date": {"year": 433, "month": 550, "day": 318}, - "physical_info": { - "power_receptacle": 1, - "network_uplink": 1, - "voltage": 1, - "amperes": 1, - }, - "installation_info": { - "rack_location": "rack_location_value", - "power_distance_meters": 2246, - "switch_distance_meters": 2347, - "rack_unit_dimensions": { - "width_inches": 0.1273, - "height_inches": 0.13620000000000002, - "depth_inches": 0.1262, - }, - "rack_space": {"start_rack_unit": 1613, "end_rack_unit": 1366}, - "rack_type": 1, - }, - "zone": "zone_value", - "requested_installation_date": {}, - "actual_installation_date": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateHardwareRequest.meta.fields["hardware"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["hardware"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["hardware"][field])): - del request_init["hardware"][field][i][subfield] - else: - del 
request_init["hardware"][field][subfield] + request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -22288,13 +23034,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_hardware(request) + response = client.delete_hardware(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_update_hardware_rest_use_cached_wrapped_rpc(): +def test_delete_hardware_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22308,17 +23054,17 @@ def test_update_hardware_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_hardware in client._transport._wrapped_methods + assert client._transport.delete_hardware in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_hardware] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_hardware] = mock_rpc request = {} - client.update_hardware(request) + client.delete_hardware(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -22327,19 +23073,20 @@ def test_update_hardware_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_hardware(request) + client.delete_hardware(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_hardware_rest_required_fields( - request_type=service.UpdateHardwareRequest, +def test_delete_hardware_rest_required_fields( + request_type=service.DeleteHardwareRequest, ): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22350,24 +23097,23 @@ def test_update_hardware_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_hardware._get_unset_required_fields(jsonified_request) + ).delete_hardware._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_hardware._get_unset_required_fields(jsonified_request) + ).delete_hardware._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "request_id", - "update_mask", - ) - ) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22388,10 +23134,9 @@ def test_update_hardware_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -22401,37 +23146,24 @@ def test_update_hardware_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_hardware(request) + response = client.delete_hardware(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_hardware_rest_unset_required_fields(): +def test_delete_hardware_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_hardware._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "requestId", - "updateMask", - ) - ) - & set( - ( - "updateMask", - "hardware", - ) - ) - ) + unset_fields = transport.delete_hardware._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_hardware_rest_interceptors(null_interceptor): +def test_delete_hardware_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22446,13 +23178,13 @@ def test_update_hardware_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_update_hardware" + transports.GDCHardwareManagementRestInterceptor, "post_delete_hardware" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_update_hardware" + transports.GDCHardwareManagementRestInterceptor, "pre_delete_hardware" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.UpdateHardwareRequest.pb(service.UpdateHardwareRequest()) + pb_message = service.DeleteHardwareRequest.pb(service.DeleteHardwareRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22467,7 +23199,7 @@ def test_update_hardware_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.UpdateHardwareRequest() + request = service.DeleteHardwareRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22475,7 +23207,7 @@ def test_update_hardware_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_hardware( + client.delete_hardware( request, metadata=[ ("key", "val"), @@ -22487,8 +23219,8 @@ def test_update_hardware_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_hardware_rest_bad_request( - transport: str = "rest", request_type=service.UpdateHardwareRequest +def test_delete_hardware_rest_bad_request( + transport: str = "rest", request_type=service.DeleteHardwareRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22496,9 +23228,7 @@ def test_update_hardware_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "hardware": {"name": 
"projects/sample1/locations/sample2/hardware/sample3"} - } + request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22510,10 +23240,10 @@ def test_update_hardware_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_hardware(request) + client.delete_hardware(request) -def test_update_hardware_rest_flattened(): +def test_delete_hardware_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22525,14 +23255,11 @@ def test_update_hardware_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "hardware": {"name": "projects/sample1/locations/sample2/hardware/sample3"} - } + sample_request = {"name": "projects/sample1/locations/sample2/hardware/sample3"} # get truthy value for each flattened field mock_args = dict( - hardware=resources.Hardware(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -22543,20 +23270,20 @@ def test_update_hardware_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_hardware(**mock_args) + client.delete_hardware(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{hardware.name=projects/*/locations/*/hardware/*}" + "%s/v1alpha/{name=projects/*/locations/*/hardware/*}" % client.transport._host, args[1], ) -def test_update_hardware_rest_flattened_error(transport: str = "rest"): +def test_delete_hardware_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22565,14 +23292,13 @@ def test_update_hardware_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_hardware( - service.UpdateHardwareRequest(), - hardware=resources.Hardware(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_hardware( + service.DeleteHardwareRequest(), + name="name_value", ) -def test_update_hardware_rest_error(): +def test_delete_hardware_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22581,39 +23307,46 @@ def test_update_hardware_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.DeleteHardwareRequest, + service.ListCommentsRequest, dict, ], ) -def test_delete_hardware_rest(request_type): +def test_list_comments_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListCommentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListCommentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_hardware(request) + response = client.list_comments(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListCommentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_delete_hardware_rest_use_cached_wrapped_rpc(): +def test_list_comments_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22627,39 +23360,33 @@ def test_delete_hardware_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_hardware in client._transport._wrapped_methods + assert client._transport.list_comments in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.delete_hardware] = mock_rpc + client._transport._wrapped_methods[client._transport.list_comments] = mock_rpc request = {} - client.delete_hardware(request) + client.list_comments(request) # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() + assert mock_rpc.call_count == 1 - client.delete_hardware(request) + client.list_comments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_hardware_rest_required_fields( - request_type=service.DeleteHardwareRequest, -): +def test_list_comments_rest_required_fields(request_type=service.ListCommentsRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22670,23 +23397,30 @@ def test_delete_hardware_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_hardware._get_unset_required_fields(jsonified_request) + ).list_comments._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_hardware._get_unset_required_fields(jsonified_request) + ).list_comments._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22695,7 +23429,7 @@ def test_delete_hardware_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListCommentsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -22707,36 +23441,49 @@ def test_delete_hardware_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListCommentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_hardware(request) + response = client.list_comments(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_hardware_rest_unset_required_fields(): +def test_list_comments_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_hardware._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.list_comments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_hardware_rest_interceptors(null_interceptor): +def test_list_comments_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22749,15 +23496,13 @@ def test_delete_hardware_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_delete_hardware" + transports.GDCHardwareManagementRestInterceptor, "post_list_comments" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_delete_hardware" + transports.GDCHardwareManagementRestInterceptor, "pre_list_comments" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.DeleteHardwareRequest.pb(service.DeleteHardwareRequest()) + pb_message = service.ListCommentsRequest.pb(service.ListCommentsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22768,19 +23513,19 @@ def test_delete_hardware_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = service.ListCommentsResponse.to_json( + service.ListCommentsResponse() ) - request = service.DeleteHardwareRequest() + 
request = service.ListCommentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = service.ListCommentsResponse() - client.delete_hardware( + client.list_comments( request, metadata=[ ("key", "val"), @@ -22792,8 +23537,8 @@ def test_delete_hardware_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_hardware_rest_bad_request( - transport: str = "rest", request_type=service.DeleteHardwareRequest +def test_list_comments_rest_bad_request( + transport: str = "rest", request_type=service.ListCommentsRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22801,7 +23546,7 @@ def test_delete_hardware_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22813,10 +23558,10 @@ def test_delete_hardware_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_hardware(request) + client.list_comments(request) -def test_delete_hardware_rest_flattened(): +def test_list_comments_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22825,38 +23570,40 @@ def test_delete_hardware_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListCommentsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/hardware/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListCommentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_hardware(**mock_args) + client.list_comments(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/hardware/*}" + "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/comments" % client.transport._host, args[1], ) -def test_delete_hardware_rest_flattened_error(transport: str = "rest"): +def test_list_comments_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22865,61 +23612,122 @@ def test_delete_hardware_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_hardware( - service.DeleteHardwareRequest(), - name="name_value", + client.list_comments( + service.ListCommentsRequest(), + parent="parent_value", ) -def test_delete_hardware_rest_error(): +def test_list_comments_rest_pager(transport: str = "rest"): client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListCommentsResponse( + comments=[ + resources.Comment(), + resources.Comment(), + resources.Comment(), + ], + next_page_token="abc", + ), + service.ListCommentsResponse( + comments=[], + next_page_token="def", + ), + service.ListCommentsResponse( + comments=[ + resources.Comment(), + ], + next_page_token="ghi", + ), + service.ListCommentsResponse( + comments=[ + resources.Comment(), + resources.Comment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListCommentsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + + pager = client.list_comments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Comment) for i in results) + + pages = list(client.list_comments(request=sample_request).pages) + 
for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - service.ListCommentsRequest, + service.GetCommentRequest, dict, ], ) -def test_list_comments_rest(request_type): +def test_get_comment_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListCommentsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = resources.Comment( + name="name_value", + author="author_value", + text="text_value", + author_entity=resources.Entity.GOOGLE, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListCommentsResponse.pb(return_value) + return_value = resources.Comment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_comments(request) + response = client.get_comment(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListCommentsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.Comment) + assert response.name == "name_value" + assert response.author == "author_value" + assert response.text == "text_value" + assert response.author_entity == resources.Entity.GOOGLE -def test_list_comments_rest_use_cached_wrapped_rpc(): +def test_get_comment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22933,33 +23741,33 @@ def test_list_comments_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_comments in client._transport._wrapped_methods + assert client._transport.get_comment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_comments] = mock_rpc + client._transport._wrapped_methods[client._transport.get_comment] = mock_rpc request = {} - client.list_comments(request) + client.get_comment(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_comments(request) + client.get_comment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_comments_rest_required_fields(request_type=service.ListCommentsRequest): +def test_get_comment_rest_required_fields(request_type=service.GetCommentRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22970,30 +23778,21 @@ def test_list_comments_rest_required_fields(request_type=service.ListCommentsReq unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_comments._get_unset_required_fields(jsonified_request) + ).get_comment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_comments._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).get_comment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23002,7 +23801,7 @@ def test_list_comments_rest_required_fields(request_type=service.ListCommentsReq request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListCommentsResponse() + return_value = resources.Comment() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23023,40 +23822,30 @@ def test_list_comments_rest_required_fields(request_type=service.ListCommentsReq response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListCommentsResponse.pb(return_value) + return_value = resources.Comment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_comments(request) + response = client.get_comment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_comments_rest_unset_required_fields(): +def test_get_comment_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_comments._get_unset_required_fields({}) - assert 
set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_comment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_comments_rest_interceptors(null_interceptor): +def test_get_comment_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23069,13 +23858,13 @@ def test_list_comments_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_list_comments" + transports.GDCHardwareManagementRestInterceptor, "post_get_comment" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_list_comments" + transports.GDCHardwareManagementRestInterceptor, "pre_get_comment" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.ListCommentsRequest.pb(service.ListCommentsRequest()) + pb_message = service.GetCommentRequest.pb(service.GetCommentRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -23086,19 +23875,17 @@ def test_list_comments_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = service.ListCommentsResponse.to_json( - service.ListCommentsResponse() - ) + req.return_value._content = resources.Comment.to_json(resources.Comment()) - request = service.ListCommentsRequest() + request = service.GetCommentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = service.ListCommentsResponse() + post.return_value = resources.Comment() - client.list_comments( + 
client.get_comment( request, metadata=[ ("key", "val"), @@ -23110,8 +23897,8 @@ def test_list_comments_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_comments_rest_bad_request( - transport: str = "rest", request_type=service.ListCommentsRequest +def test_get_comment_rest_bad_request( + transport: str = "rest", request_type=service.GetCommentRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23119,7 +23906,9 @@ def test_list_comments_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23131,10 +23920,10 @@ def test_list_comments_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_comments(request) + client.get_comment(request) -def test_list_comments_rest_flattened(): +def test_get_comment_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23143,14 +23932,16 @@ def test_list_comments_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = service.ListCommentsResponse() + return_value = resources.Comment() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + sample_request = { + "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -23158,25 +23949,25 @@ def test_list_comments_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListCommentsResponse.pb(return_value) + return_value = resources.Comment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_comments(**mock_args) + client.get_comment(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/comments" + "%s/v1alpha/{name=projects/*/locations/*/orders/*/comments/*}" % client.transport._host, args[1], ) -def test_list_comments_rest_flattened_error(transport: str = "rest"): +def test_get_comment_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23185,120 +23976,130 @@ def test_list_comments_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_comments( - service.ListCommentsRequest(), - parent="parent_value", + client.get_comment( + service.GetCommentRequest(), + name="name_value", ) -def test_list_comments_rest_pager(transport: str = "rest"): +def test_get_comment_rest_error(): client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListCommentsResponse( - comments=[ - resources.Comment(), - resources.Comment(), - resources.Comment(), - ], - next_page_token="abc", - ), - service.ListCommentsResponse( - comments=[], - next_page_token="def", - ), - service.ListCommentsResponse( - comments=[ - resources.Comment(), - ], - next_page_token="ghi", - ), - service.ListCommentsResponse( - comments=[ - resources.Comment(), - resources.Comment(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListCommentsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} - - pager = client.list_comments(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Comment) for i in results) - - pages = list(client.list_comments(request=sample_request).pages) - for page_, 
token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - service.GetCommentRequest, + service.CreateCommentRequest, dict, ], ) -def test_get_comment_rest(request_type): +def test_create_comment_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init["comment"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "labels": {}, + "author": "author_value", + "text": "text_value", + "customer_viewed_time": {}, + "author_entity": 1, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateCommentRequest.meta.fields["comment"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["comment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["comment"][field])): + del request_init["comment"][field][i][subfield] + else: + del 
request_init["comment"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Comment( - name="name_value", - author="author_value", - text="text_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Comment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_comment(request) + response = client.create_comment(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Comment) - assert response.name == "name_value" - assert response.author == "author_value" - assert response.text == "text_value" + assert response.operation.name == "operations/spam" -def test_get_comment_rest_use_cached_wrapped_rpc(): +def test_create_comment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23312,33 +24113,37 @@ def test_get_comment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_comment in client._transport._wrapped_methods + assert client._transport.create_comment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_comment] = mock_rpc + client._transport._wrapped_methods[client._transport.create_comment] = mock_rpc request = {} - client.get_comment(request) + client.create_comment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_comment(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_comment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_comment_rest_required_fields(request_type=service.GetCommentRequest): +def test_create_comment_rest_required_fields(request_type=service.CreateCommentRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23349,21 +24154,28 @@ def test_get_comment_rest_required_fields(request_type=service.GetCommentRequest unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_comment._get_unset_required_fields(jsonified_request) + ).create_comment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_comment._get_unset_required_fields(jsonified_request) + ).create_comment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "comment_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23372,7 +24184,7 @@ def test_get_comment_rest_required_fields(request_type=service.GetCommentRequest request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Comment() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23384,39 +24196,50 @@ def test_get_comment_rest_required_fields(request_type=service.GetCommentRequest pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Comment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_comment(request) + response = client.create_comment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_comment_rest_unset_required_fields(): +def test_create_comment_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_comment._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_comment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "commentId", + "requestId", + ) + ) + & set( + ( + "parent", + "comment", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_comment_rest_interceptors(null_interceptor): +def test_create_comment_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23429,13 +24252,15 @@ def test_get_comment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_get_comment" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_create_comment" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_get_comment" + transports.GDCHardwareManagementRestInterceptor, "pre_create_comment" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.GetCommentRequest.pb(service.GetCommentRequest()) + pb_message = service.CreateCommentRequest.pb(service.CreateCommentRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -23446,17 +24271,19 @@ def test_get_comment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Comment.to_json(resources.Comment()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = service.GetCommentRequest() + request = service.CreateCommentRequest() metadata = [ ("key", "val"), 
("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Comment() + post.return_value = operations_pb2.Operation() - client.get_comment( + client.create_comment( request, metadata=[ ("key", "val"), @@ -23468,8 +24295,8 @@ def test_get_comment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_comment_rest_bad_request( - transport: str = "rest", request_type=service.GetCommentRequest +def test_create_comment_rest_bad_request( + transport: str = "rest", request_type=service.CreateCommentRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23477,9 +24304,7 @@ def test_get_comment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23491,10 +24316,10 @@ def test_get_comment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_comment(request) + client.create_comment(request) -def test_get_comment_rest_flattened(): +def test_create_comment_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23503,42 +24328,40 @@ def test_get_comment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Comment() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" - } + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + comment=resources.Comment(name="name_value"), + comment_id="comment_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Comment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_comment(**mock_args) + client.create_comment(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/orders/*/comments/*}" + "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/comments" % client.transport._host, args[1], ) -def test_get_comment_rest_flattened_error(transport: str = "rest"): +def test_create_comment_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23547,13 +24370,15 @@ def test_get_comment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_comment( - service.GetCommentRequest(), - name="name_value", + client.create_comment( + service.CreateCommentRequest(), + parent="parent_value", + comment=resources.Comment(name="name_value"), + comment_id="comment_id_value", ) -def test_get_comment_rest_error(): +def test_create_comment_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -23562,113 +24387,52 @@ def test_get_comment_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.CreateCommentRequest, + service.RecordActionOnCommentRequest, dict, ], ) -def test_create_comment_rest(request_type): +def test_record_action_on_comment_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} - request_init["comment"] = { - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "labels": {}, - "author": "author_value", - "text": "text_value", + request_init = { + "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.CreateCommentRequest.meta.fields["comment"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["comment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["comment"][field])): - del request_init["comment"][field][i][subfield] - else: - del 
request_init["comment"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.Comment( + name="name_value", + author="author_value", + text="text_value", + author_entity=resources.Entity.GOOGLE, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Comment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_comment(request) + response = client.record_action_on_comment(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, resources.Comment) + assert response.name == "name_value" + assert response.author == "author_value" + assert response.text == "text_value" + assert response.author_entity == resources.Entity.GOOGLE -def test_create_comment_rest_use_cached_wrapped_rpc(): +def test_record_action_on_comment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23682,37 +24446,40 @@ def test_create_comment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_comment in client._transport._wrapped_methods + assert ( + client._transport.record_action_on_comment + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_comment] = mock_rpc + client._transport._wrapped_methods[ + client._transport.record_action_on_comment + ] = mock_rpc request = {} - client.create_comment(request) + client.record_action_on_comment(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_comment(request) + client.record_action_on_comment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_comment_rest_required_fields(request_type=service.CreateCommentRequest): +def test_record_action_on_comment_rest_required_fields( + request_type=service.RecordActionOnCommentRequest, +): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23723,28 +24490,21 @@ def test_create_comment_rest_required_fields(request_type=service.CreateCommentR unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_comment._get_unset_required_fields(jsonified_request) + ).record_action_on_comment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_comment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "comment_id", - "request_id", - ) - ) + ).record_action_on_comment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23753,7 +24513,7 @@ def test_create_comment_rest_required_fields(request_type=service.CreateCommentR request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.Comment() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23773,42 +24533,40 @@ def test_create_comment_rest_required_fields(request_type=service.CreateCommentR response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Comment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_comment(request) + response = client.record_action_on_comment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_comment_rest_unset_required_fields(): +def test_record_action_on_comment_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_comment._get_unset_required_fields({}) + unset_fields = 
transport.record_action_on_comment._get_unset_required_fields({}) assert set(unset_fields) == ( - set( - ( - "commentId", - "requestId", - ) - ) + set(()) & set( ( - "parent", - "comment", + "name", + "actionType", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_comment_rest_interceptors(null_interceptor): +def test_record_action_on_comment_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23821,15 +24579,15 @@ def test_create_comment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_create_comment" + transports.GDCHardwareManagementRestInterceptor, "post_record_action_on_comment" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_create_comment" + transports.GDCHardwareManagementRestInterceptor, "pre_record_action_on_comment" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.CreateCommentRequest.pb(service.CreateCommentRequest()) + pb_message = service.RecordActionOnCommentRequest.pb( + service.RecordActionOnCommentRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -23840,19 +24598,17 @@ def test_create_comment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = resources.Comment.to_json(resources.Comment()) - request = service.CreateCommentRequest() + request = service.RecordActionOnCommentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - 
post.return_value = operations_pb2.Operation() + post.return_value = resources.Comment() - client.create_comment( + client.record_action_on_comment( request, metadata=[ ("key", "val"), @@ -23864,8 +24620,8 @@ def test_create_comment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_comment_rest_bad_request( - transport: str = "rest", request_type=service.CreateCommentRequest +def test_record_action_on_comment_rest_bad_request( + transport: str = "rest", request_type=service.RecordActionOnCommentRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23873,7 +24629,9 @@ def test_create_comment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23885,10 +24643,10 @@ def test_create_comment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_comment(request) + client.record_action_on_comment(request) -def test_create_comment_rest_flattened(): +def test_record_action_on_comment_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23897,40 +24655,43 @@ def test_create_comment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.Comment() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + sample_request = { + "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - comment=resources.Comment(name="name_value"), - comment_id="comment_id_value", + name="name_value", + action_type=service.RecordActionOnCommentRequest.ActionType.READ, ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Comment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_comment(**mock_args) + client.record_action_on_comment(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/comments" + "%s/v1alpha/{name=projects/*/locations/*/orders/*/comments/*}:recordAction" % client.transport._host, args[1], ) -def test_create_comment_rest_flattened_error(transport: str = "rest"): +def test_record_action_on_comment_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23939,15 +24700,14 @@ def test_create_comment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_comment( - service.CreateCommentRequest(), - parent="parent_value", - comment=resources.Comment(name="name_value"), - comment_id="comment_id_value", + client.record_action_on_comment( + service.RecordActionOnCommentRequest(), + name="name_value", + action_type=service.RecordActionOnCommentRequest.ActionType.READ, ) -def test_create_comment_rest_error(): +def test_record_action_on_comment_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -26069,6 +26829,13 @@ def test_create_zone_rest(request_type): "kubernetes_ipv4_subnet": {}, }, "globally_unique_id": "globally_unique_id_value", + "subscription_configs": [ + { + "subscription_id": "subscription_id_value", + "billing_id": "billing_id_value", + "state": 1, + } + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -26498,6 +27265,13 @@ def test_update_zone_rest(request_type): "kubernetes_ipv4_subnet": {}, }, "globally_unique_id": "globally_unique_id_value", + "subscription_configs": [ + { + "subscription_id": "subscription_id_value", + "billing_id": "billing_id_value", + "state": 1, + } + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -27428,7 +28202,7 @@ def test_signal_zone_state_rest_flattened(): # get truthy value for each flattened field mock_args = dict( name="name_value", - state_signal=service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP, + state_signal=service.SignalZoneStateRequest.StateSignal.FACTORY_TURNUP_CHECKS_PASSED, ) mock_args.update(sample_request) @@ -27464,7 +28238,7 @@ def test_signal_zone_state_rest_flattened_error(transport: str = "rest"): client.signal_zone_state( service.SignalZoneStateRequest(), name="name_value", - state_signal=service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP, + state_signal=service.SignalZoneStateRequest.StateSignal.FACTORY_TURNUP_CHECKS_PASSED, ) @@ -27636,6 +28410,7 @@ def test_gdc_hardware_management_base_transport(): "list_comments", "get_comment", "create_comment", + "record_action_on_comment", "list_change_log_entries", "get_change_log_entry", "list_skus", @@ -28005,6 +28780,9 @@ def test_gdc_hardware_management_client_transport_session_collision(transport_na session1 = client1.transport.create_comment._session session2 = client2.transport.create_comment._session assert session1 != session2 + session1 = client1.transport.record_action_on_comment._session + session2 = client2.transport.record_action_on_comment._session + assert session1 != session2 session1 = client1.transport.list_change_log_entries._session session2 = client2.transport.list_change_log_entries._session assert session1 != session2 From b4c977059e075c73781c179b26fdf915548e65c4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 12:08:14 -0400 Subject: [PATCH 33/59] fix!: [google-cloud-kms] Pagination feature is introduced for method `ListKeyHandles` in service `Autokey` (#13093) BEGIN_COMMIT_OVERRIDE fix!: Pagination feature is introduced for method ListKeyHandles in service Autokey feat: Adding a 
state field for AutokeyConfig docs: Field service_resolvers in message .google.cloud.kms.v1.EkmConnection is Explicitly is marked as to have field behavior of Optional docs: A comment for field destroy_scheduled_duration in message .google.cloud.kms.v1.CryptoKey is updated for the default duration END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. feat: Adding a state field for AutokeyConfig docs: Field service_resolvers in message .google.cloud.kms.v1.EkmConnection is Explicitly is marked as to have field behavior of Optional docs: A comment for field `destroy_scheduled_duration` in message `.google.cloud.kms.v1.CryptoKey` is updated for the default duration PiperOrigin-RevId: 676068244 Source-Link: https://github.com/googleapis/googleapis/commit/42492c963aaac713339511a145fbefcd78f95880 Source-Link: https://github.com/googleapis/googleapis-gen/commit/47432180bdfba879fc7f82c4c451181702f25009 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWttcy8uT3dsQm90LnlhbWwiLCJoIjoiNDc0MzIxODBiZGZiYTg3OWZjN2Y4MmM0YzQ1MTE4MTcwMmYyNTAwOSJ9 --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google-cloud-kms/docs/kms_v1/autokey.rst | 4 + .../kms_v1/services/autokey/async_client.py | 30 +- .../cloud/kms_v1/services/autokey/client.py | 29 +- .../cloud/kms_v1/services/autokey/pagers.py | 193 +++++++++++ .../services/autokey/transports/grpc.py | 5 +- .../autokey/transports/grpc_asyncio.py | 5 +- .../services/autokey/transports/rest.py | 5 +- .../services/autokey_admin/async_client.py | 4 +- .../kms_v1/services/autokey_admin/client.py | 3 +- .../services/autokey_admin/transports/grpc.py | 3 +- .../autokey_admin/transports/grpc_asyncio.py | 3 +- .../services/autokey_admin/transports/rest.py | 3 +- .../google/cloud/kms_v1/types/autokey.py | 33 ++ .../cloud/kms_v1/types/autokey_admin.py | 30 ++ .../google/cloud/kms_v1/types/ekm_service.py | 2 +- .../google/cloud/kms_v1/types/resources.py | 2 +- ...enerated_autokey_list_key_handles_async.py | 5 +- 
...generated_autokey_list_key_handles_sync.py | 5 +- .../snippet_metadata_google.cloud.kms.v1.json | 16 +- .../scripts/fixup_kms_v1_keywords.py | 2 +- .../tests/unit/gapic/kms_v1/test_autokey.py | 300 +++++++++++++++++- .../unit/gapic/kms_v1/test_autokey_admin.py | 15 + 22 files changed, 648 insertions(+), 49 deletions(-) create mode 100644 packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/pagers.py diff --git a/packages/google-cloud-kms/docs/kms_v1/autokey.rst b/packages/google-cloud-kms/docs/kms_v1/autokey.rst index 2335ac5e9da8..266646a17c28 100644 --- a/packages/google-cloud-kms/docs/kms_v1/autokey.rst +++ b/packages/google-cloud-kms/docs/kms_v1/autokey.rst @@ -4,3 +4,7 @@ Autokey .. automodule:: google.cloud.kms_v1.services.autokey :members: :inherited-members: + +.. automodule:: google.cloud.kms_v1.services.autokey.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/async_client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/async_client.py index 6f5a61fc292d..e79da53a08e6 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/async_client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/async_client.py @@ -49,6 +49,7 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.cloud.kms_v1.services.autokey import pagers from google.cloud.kms_v1.types import autokey from .client import AutokeyClient @@ -57,8 +58,9 @@ class AutokeyAsyncClient: - """Provides interfaces for using Cloud KMS Autokey to provision new - [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer + """Provides interfaces for using `Cloud KMS + Autokey `__ to provision + new [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer Managed Encryption Key (CMEK) use, on-demand. 
To support certain client tooling, this feature is modeled around a [KeyHandle][google.cloud.kms.v1.KeyHandle] resource: creating a @@ -544,7 +546,7 @@ async def list_key_handles( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> autokey.ListKeyHandlesResponse: + ) -> pagers.ListKeyHandlesAsyncPager: r"""Lists [KeyHandles][google.cloud.kms.v1.KeyHandle]. .. code-block:: python @@ -568,10 +570,11 @@ async def sample_list_key_handles(): ) # Make the request - response = await client.list_key_handles(request=request) + page_result = client.list_key_handles(request=request) # Handle the response - print(response) + async for response in page_result: + print(response) Args: request (Optional[Union[google.cloud.kms_v1.types.ListKeyHandlesRequest, dict]]): @@ -593,10 +596,13 @@ async def sample_list_key_handles(): sent along with the request as metadata. Returns: - google.cloud.kms_v1.types.ListKeyHandlesResponse: + google.cloud.kms_v1.services.autokey.pagers.ListKeyHandlesAsyncPager: Response message for [Autokey.ListKeyHandles][google.cloud.kms.v1.Autokey.ListKeyHandles]. + Iterating over this object will yield results and + resolve additional pages automatically. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -641,6 +647,17 @@ async def sample_list_key_handles(): metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListKeyHandlesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. 
return response @@ -855,6 +872,7 @@ async def set_iam_policy( **JSON Example** :: + { "bindings": [ { diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/client.py index e17e6a6fb76e..8656ffb6a4db 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/client.py @@ -55,6 +55,7 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.cloud.kms_v1.services.autokey import pagers from google.cloud.kms_v1.types import autokey from .transports.base import DEFAULT_CLIENT_INFO, AutokeyTransport @@ -99,8 +100,9 @@ def get_transport_class( class AutokeyClient(metaclass=AutokeyClientMeta): - """Provides interfaces for using Cloud KMS Autokey to provision new - [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer + """Provides interfaces for using `Cloud KMS + Autokey `__ to provision + new [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer Managed Encryption Key (CMEK) use, on-demand. To support certain client tooling, this feature is modeled around a [KeyHandle][google.cloud.kms.v1.KeyHandle] resource: creating a @@ -986,7 +988,7 @@ def list_key_handles( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> autokey.ListKeyHandlesResponse: + ) -> pagers.ListKeyHandlesPager: r"""Lists [KeyHandles][google.cloud.kms.v1.KeyHandle]. .. 
code-block:: python @@ -1010,10 +1012,11 @@ def sample_list_key_handles(): ) # Make the request - response = client.list_key_handles(request=request) + page_result = client.list_key_handles(request=request) # Handle the response - print(response) + for response in page_result: + print(response) Args: request (Union[google.cloud.kms_v1.types.ListKeyHandlesRequest, dict]): @@ -1035,10 +1038,13 @@ def sample_list_key_handles(): sent along with the request as metadata. Returns: - google.cloud.kms_v1.types.ListKeyHandlesResponse: + google.cloud.kms_v1.services.autokey.pagers.ListKeyHandlesPager: Response message for [Autokey.ListKeyHandles][google.cloud.kms.v1.Autokey.ListKeyHandles]. + Iterating over this object will yield results and + resolve additional pages automatically. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -1080,6 +1086,17 @@ def sample_list_key_handles(): metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListKeyHandlesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. return response diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/pagers.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/pagers.py new file mode 100644 index 000000000000..5ba18404a1ec --- /dev/null +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/pagers.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.kms_v1.types import autokey + + +class ListKeyHandlesPager: + """A pager for iterating through ``list_key_handles`` requests. + + This class thinly wraps an initial + :class:`google.cloud.kms_v1.types.ListKeyHandlesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``key_handles`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListKeyHandles`` requests and continue to iterate + through the ``key_handles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.kms_v1.types.ListKeyHandlesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., autokey.ListKeyHandlesResponse], + request: autokey.ListKeyHandlesRequest, + response: autokey.ListKeyHandlesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.kms_v1.types.ListKeyHandlesRequest): + The initial request object. + response (google.cloud.kms_v1.types.ListKeyHandlesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = autokey.ListKeyHandlesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[autokey.ListKeyHandlesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[autokey.KeyHandle]: + for page in self.pages: + yield from page.key_handles + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListKeyHandlesAsyncPager: + """A pager for iterating through ``list_key_handles`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.kms_v1.types.ListKeyHandlesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``key_handles`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListKeyHandles`` requests and continue to iterate + through the ``key_handles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.kms_v1.types.ListKeyHandlesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[autokey.ListKeyHandlesResponse]], + request: autokey.ListKeyHandlesRequest, + response: autokey.ListKeyHandlesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.kms_v1.types.ListKeyHandlesRequest): + The initial request object. + response (google.cloud.kms_v1.types.ListKeyHandlesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = autokey.ListKeyHandlesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[autokey.ListKeyHandlesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[autokey.KeyHandle]: + async def async_generator(): + async for page in self.pages: + for response in page.key_handles: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc.py index daf4f02878d3..e248c23a9c2f 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc.py @@ -34,8 +34,9 @@ class AutokeyGrpcTransport(AutokeyTransport): """gRPC backend transport for Autokey. - Provides interfaces for using Cloud KMS Autokey to provision new - [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer + Provides interfaces for using `Cloud KMS + Autokey `__ to provision + new [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer Managed Encryption Key (CMEK) use, on-demand. 
To support certain client tooling, this feature is modeled around a [KeyHandle][google.cloud.kms.v1.KeyHandle] resource: creating a diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc_asyncio.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc_asyncio.py index 0028a1b6eb14..efd29a509e7f 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc_asyncio.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc_asyncio.py @@ -37,8 +37,9 @@ class AutokeyGrpcAsyncIOTransport(AutokeyTransport): """gRPC AsyncIO backend transport for Autokey. - Provides interfaces for using Cloud KMS Autokey to provision new - [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer + Provides interfaces for using `Cloud KMS + Autokey `__ to provision + new [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer Managed Encryption Key (CMEK) use, on-demand. To support certain client tooling, this feature is modeled around a [KeyHandle][google.cloud.kms.v1.KeyHandle] resource: creating a diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/rest.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/rest.py index d53e8639b8d9..2d9832d218dc 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/rest.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/rest.py @@ -316,8 +316,9 @@ class AutokeyRestStub: class AutokeyRestTransport(AutokeyTransport): """REST backend transport for Autokey. - Provides interfaces for using Cloud KMS Autokey to provision new - [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer + Provides interfaces for using `Cloud KMS + Autokey `__ to provision + new [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer Managed Encryption Key (CMEK) use, on-demand. 
To support certain client tooling, this feature is modeled around a [KeyHandle][google.cloud.kms.v1.KeyHandle] resource: creating a diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/async_client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/async_client.py index 6c285d379502..af84851a3916 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/async_client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/async_client.py @@ -56,7 +56,8 @@ class AutokeyAdminAsyncClient: - """Provides interfaces for managing Cloud KMS Autokey folder-level + """Provides interfaces for managing `Cloud KMS + Autokey `__ folder-level configurations. A configuration is inherited by all descendent projects. A configuration at one folder overrides any other configurations in its ancestry. Setting a configuration on a folder @@ -825,6 +826,7 @@ async def set_iam_policy( **JSON Example** :: + { "bindings": [ { diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py index ea8aa35437b3..9d2044169727 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py @@ -98,7 +98,8 @@ def get_transport_class( class AutokeyAdminClient(metaclass=AutokeyAdminClientMeta): - """Provides interfaces for managing Cloud KMS Autokey folder-level + """Provides interfaces for managing `Cloud KMS + Autokey `__ folder-level configurations. A configuration is inherited by all descendent projects. A configuration at one folder overrides any other configurations in its ancestry. 
Setting a configuration on a folder diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc.py index 1b7ac8d3dc43..6ee4354598fb 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc.py @@ -34,7 +34,8 @@ class AutokeyAdminGrpcTransport(AutokeyAdminTransport): """gRPC backend transport for AutokeyAdmin. - Provides interfaces for managing Cloud KMS Autokey folder-level + Provides interfaces for managing `Cloud KMS + Autokey `__ folder-level configurations. A configuration is inherited by all descendent projects. A configuration at one folder overrides any other configurations in its ancestry. Setting a configuration on a folder diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc_asyncio.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc_asyncio.py index b1c8dfa31ad4..f6fcb383e2f5 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc_asyncio.py @@ -37,7 +37,8 @@ class AutokeyAdminGrpcAsyncIOTransport(AutokeyAdminTransport): """gRPC AsyncIO backend transport for AutokeyAdmin. - Provides interfaces for managing Cloud KMS Autokey folder-level + Provides interfaces for managing `Cloud KMS + Autokey `__ folder-level configurations. A configuration is inherited by all descendent projects. A configuration at one folder overrides any other configurations in its ancestry. 
Setting a configuration on a folder diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest.py index a6b03e22c972..e8affeb02ab6 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest.py @@ -316,7 +316,8 @@ class AutokeyAdminRestStub: class AutokeyAdminRestTransport(AutokeyAdminTransport): """REST backend transport for AutokeyAdmin. - Provides interfaces for managing Cloud KMS Autokey folder-level + Provides interfaces for managing `Cloud KMS + Autokey `__ folder-level configurations. A configuration is inherited by all descendent projects. A configuration at one folder overrides any other configurations in its ancestry. Setting a configuration on a folder diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey.py index 3a5e93a32f61..f94b8284eeb5 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey.py @@ -145,6 +145,19 @@ class ListKeyHandlesRequest(proto.Message): Required. Name of the resource project and location from which to list [KeyHandles][google.cloud.kms.v1.KeyHandle], e.g. ``projects/{PROJECT_ID}/locations/{LOCATION}``. + page_size (int): + Optional. Optional limit on the number of + [KeyHandles][google.cloud.kms.v1.KeyHandle] to include in + the response. The service may return fewer than this value. + Further [KeyHandles][google.cloud.kms.v1.KeyHandle] can + subsequently be obtained by including the + [ListKeyHandlesResponse.next_page_token][google.cloud.kms.v1.ListKeyHandlesResponse.next_page_token] + in a subsequent request. If unspecified, at most 100 + [KeyHandles][google.cloud.kms.v1.KeyHandle] will be + returned. 
+ page_token (str): + Optional. Optional pagination token, returned earlier via + [ListKeyHandlesResponse.next_page_token][google.cloud.kms.v1.ListKeyHandlesResponse.next_page_token]. filter (str): Optional. Filter to apply when listing [KeyHandles][google.cloud.kms.v1.KeyHandle], e.g. @@ -155,6 +168,14 @@ class ListKeyHandlesRequest(proto.Message): proto.STRING, number=1, ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) filter: str = proto.Field( proto.STRING, number=4, @@ -168,13 +189,25 @@ class ListKeyHandlesResponse(proto.Message): Attributes: key_handles (MutableSequence[google.cloud.kms_v1.types.KeyHandle]): Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. + next_page_token (str): + A token to retrieve next page of results. Pass this value in + [ListKeyHandlesRequest.page_token][google.cloud.kms.v1.ListKeyHandlesRequest.page_token] + to retrieve the next page of results. """ + @property + def raw_page(self): + return self + key_handles: MutableSequence["KeyHandle"] = proto.RepeatedField( proto.MESSAGE, number=1, message="KeyHandle", ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey_admin.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey_admin.py index b190c6d01c97..d99b60ed72cc 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey_admin.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey_admin.py @@ -97,8 +97,33 @@ class AutokeyConfig(proto.Message): for this key project must be granted the ``cloudkms.admin`` role (or pertinent permissions). A request with an empty key project field will clear the configuration. + state (google.cloud.kms_v1.types.AutokeyConfig.State): + Output only. The state for the AutokeyConfig. 
""" + class State(proto.Enum): + r"""The states AutokeyConfig can be in. + + Values: + STATE_UNSPECIFIED (0): + The state of the AutokeyConfig is + unspecified. + ACTIVE (1): + The AutokeyConfig is currently active. + KEY_PROJECT_DELETED (2): + A previously configured key project has been + deleted and the current AutokeyConfig is + unusable. + UNINITIALIZED (3): + The AutokeyConfig is not yet initialized or + has been reset to its default uninitialized + state. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + KEY_PROJECT_DELETED = 2 + UNINITIALIZED = 3 + name: str = proto.Field( proto.STRING, number=1, @@ -107,6 +132,11 @@ class AutokeyConfig(proto.Message): proto.STRING, number=2, ) + state: State = proto.Field( + proto.ENUM, + number=4, + enum=State, + ) class ShowEffectiveAutokeyConfigRequest(proto.Message): diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/ekm_service.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/ekm_service.py index 6e55bed6e26f..8fda5560ca60 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/ekm_service.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/ekm_service.py @@ -348,7 +348,7 @@ class EkmConnection(proto.Message): [EkmConnection][google.cloud.kms.v1.EkmConnection] was created. service_resolvers (MutableSequence[google.cloud.kms_v1.types.EkmConnection.ServiceResolver]): - A list of + Optional. A list of [ServiceResolvers][google.cloud.kms.v1.EkmConnection.ServiceResolver] where the EKM can be reached. There should be one ServiceResolver per EKM replica. 
Currently, only a single diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py index d7f70db29d1a..1cf5fc19392b 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py @@ -276,7 +276,7 @@ class CryptoKey(proto.Message): state before transitioning to [DESTROYED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DESTROYED]. If not specified at creation time, the default duration is - 24 hours. + 30 days. crypto_key_backend (str): Immutable. The resource name of the backend environment where the key material for all diff --git a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_autokey_list_key_handles_async.py b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_autokey_list_key_handles_async.py index e7cfa3289ce7..bc76498134df 100644 --- a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_autokey_list_key_handles_async.py +++ b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_autokey_list_key_handles_async.py @@ -44,9 +44,10 @@ async def sample_list_key_handles(): ) # Make the request - response = await client.list_key_handles(request=request) + page_result = client.list_key_handles(request=request) # Handle the response - print(response) + async for response in page_result: + print(response) # [END cloudkms_v1_generated_Autokey_ListKeyHandles_async] diff --git a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_autokey_list_key_handles_sync.py b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_autokey_list_key_handles_sync.py index 78f4b24566a0..6a7ef9a327e8 100644 --- a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_autokey_list_key_handles_sync.py +++ 
b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_autokey_list_key_handles_sync.py @@ -44,9 +44,10 @@ def sample_list_key_handles(): ) # Make the request - response = client.list_key_handles(request=request) + page_result = client.list_key_handles(request=request) # Handle the response - print(response) + for response in page_result: + print(response) # [END cloudkms_v1_generated_Autokey_ListKeyHandles_sync] diff --git a/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json b/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json index d50814dbd477..a5e165bff4c4 100644 --- a/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json +++ b/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json @@ -879,7 +879,7 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.kms_v1.types.ListKeyHandlesResponse", + "resultType": "google.cloud.kms_v1.services.autokey.pagers.ListKeyHandlesAsyncPager", "shortName": "list_key_handles" }, "description": "Sample for ListKeyHandles", @@ -889,12 +889,12 @@ "regionTag": "cloudkms_v1_generated_Autokey_ListKeyHandles_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -914,7 +914,7 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } @@ -959,7 +959,7 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.kms_v1.types.ListKeyHandlesResponse", + "resultType": "google.cloud.kms_v1.services.autokey.pagers.ListKeyHandlesPager", "shortName": "list_key_handles" }, "description": "Sample for ListKeyHandles", @@ -969,12 +969,12 @@ "regionTag": "cloudkms_v1_generated_Autokey_ListKeyHandles_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, 
"start": 27, "type": "SHORT" }, @@ -994,7 +994,7 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } diff --git a/packages/google-cloud-kms/scripts/fixup_kms_v1_keywords.py b/packages/google-cloud-kms/scripts/fixup_kms_v1_keywords.py index 76ad33b20666..7a838e47d117 100644 --- a/packages/google-cloud-kms/scripts/fixup_kms_v1_keywords.py +++ b/packages/google-cloud-kms/scripts/fixup_kms_v1_keywords.py @@ -65,7 +65,7 @@ class kmsCallTransformer(cst.CSTTransformer): 'list_crypto_key_versions': ('parent', 'page_size', 'page_token', 'view', 'filter', 'order_by', ), 'list_ekm_connections': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_import_jobs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_key_handles': ('parent', 'filter', ), + 'list_key_handles': ('parent', 'page_size', 'page_token', 'filter', ), 'list_key_rings': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'mac_sign': ('name', 'data', 'data_crc32c', ), 'mac_verify': ('name', 'data', 'mac', 'data_crc32c', 'mac_crc32c', ), diff --git a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey.py b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey.py index 3e124c98ef19..6d5f1694bcd9 100644 --- a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey.py +++ b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey.py @@ -60,6 +60,7 @@ from google.cloud.kms_v1.services.autokey import ( AutokeyAsyncClient, AutokeyClient, + pagers, transports, ) from google.cloud.kms_v1.types import autokey @@ -1866,7 +1867,9 @@ def test_list_key_handles(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_key_handles), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = autokey.ListKeyHandlesResponse() + call.return_value = autokey.ListKeyHandlesResponse( + next_page_token="next_page_token_value", + ) response = client.list_key_handles(request) # Establish that the underlying gRPC stub method was called. @@ -1876,7 +1879,8 @@ def test_list_key_handles(request_type, transport: str = "grpc"): assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, autokey.ListKeyHandlesResponse) + assert isinstance(response, pagers.ListKeyHandlesPager) + assert response.next_page_token == "next_page_token_value" def test_list_key_handles_empty_call(): @@ -1911,6 +1915,7 @@ def test_list_key_handles_non_empty_request_with_auto_populated_field(): # if they meet the requirements of AIP 4235. request = autokey.ListKeyHandlesRequest( parent="parent_value", + page_token="page_token_value", filter="filter_value", ) @@ -1924,6 +1929,7 @@ def test_list_key_handles_non_empty_request_with_auto_populated_field(): _, args, _ = call.mock_calls[0] assert args[0] == autokey.ListKeyHandlesRequest( parent="parent_value", + page_token="page_token_value", filter="filter_value", ) @@ -1978,7 +1984,9 @@ async def test_list_key_handles_empty_call_async(): with mock.patch.object(type(client.transport.list_key_handles), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - autokey.ListKeyHandlesResponse() + autokey.ListKeyHandlesResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_key_handles() call.assert_called() @@ -2045,7 +2053,9 @@ async def test_list_key_handles_async( with mock.patch.object(type(client.transport.list_key_handles), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - autokey.ListKeyHandlesResponse() + autokey.ListKeyHandlesResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_key_handles(request) @@ -2056,7 +2066,8 @@ async def test_list_key_handles_async( assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, autokey.ListKeyHandlesResponse) + assert isinstance(response, pagers.ListKeyHandlesAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -2207,6 +2218,200 @@ async def test_list_key_handles_flattened_error_async(): ) +def test_list_key_handles_pager(transport_name: str = "grpc"): + client = AutokeyClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_key_handles), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + next_page_token="abc", + ), + autokey.ListKeyHandlesResponse( + key_handles=[], + next_page_token="def", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + ], + next_page_token="ghi", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_key_handles(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, autokey.KeyHandle) for i in results) + + +def test_list_key_handles_pages(transport_name: str = "grpc"): + client = AutokeyClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_key_handles), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + next_page_token="abc", + ), + autokey.ListKeyHandlesResponse( + key_handles=[], + next_page_token="def", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + ], + next_page_token="ghi", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + ), + RuntimeError, + ) + pages = list(client.list_key_handles(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_key_handles_async_pager(): + client = AutokeyAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_key_handles), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + next_page_token="abc", + ), + autokey.ListKeyHandlesResponse( + key_handles=[], + next_page_token="def", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + ], + next_page_token="ghi", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_key_handles( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, autokey.KeyHandle) for i in responses) + + +@pytest.mark.asyncio +async def test_list_key_handles_async_pages(): + client = AutokeyAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_key_handles), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + next_page_token="abc", + ), + autokey.ListKeyHandlesResponse( + key_handles=[], + next_page_token="def", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + ], + next_page_token="ghi", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_key_handles(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + @pytest.mark.parametrize( "request_type", [ @@ -2910,7 +3115,9 @@ def test_list_key_handles_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = autokey.ListKeyHandlesResponse() + return_value = autokey.ListKeyHandlesResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() @@ -2924,7 +3131,8 @@ def test_list_key_handles_rest(request_type): response = client.list_key_handles(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, autokey.ListKeyHandlesResponse) + assert isinstance(response, pagers.ListKeyHandlesPager) + assert response.next_page_token == "next_page_token_value" def test_list_key_handles_rest_use_cached_wrapped_rpc(): @@ -2993,7 +3201,13 @@ def test_list_key_handles_rest_required_fields( credentials=ga_credentials.AnonymousCredentials() ).list_key_handles._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter",)) + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -3047,7 +3261,16 @@ def test_list_key_handles_rest_unset_required_fields(): ) unset_fields = transport.list_key_handles._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter",)) & set(("parent",))) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -3183,11 +3406,66 @@ def test_list_key_handles_rest_flattened_error(transport: str = "rest"): ) -def test_list_key_handles_rest_error(): +def test_list_key_handles_rest_pager(transport: str = "rest"): client = AutokeyClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + next_page_token="abc", + ), + autokey.ListKeyHandlesResponse( + key_handles=[], + next_page_token="def", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + ], + next_page_token="ghi", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(autokey.ListKeyHandlesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_key_handles(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, autokey.KeyHandle) for i in results) + + pages = list(client.list_key_handles(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
diff --git a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py index 6155ff4520d9..0e7ca7eda0b3 100644 --- a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py +++ b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py @@ -1127,6 +1127,7 @@ def test_update_autokey_config(request_type, transport: str = "grpc"): call.return_value = autokey_admin.AutokeyConfig( name="name_value", key_project="key_project_value", + state=autokey_admin.AutokeyConfig.State.ACTIVE, ) response = client.update_autokey_config(request) @@ -1140,6 +1141,7 @@ def test_update_autokey_config(request_type, transport: str = "grpc"): assert isinstance(response, autokey_admin.AutokeyConfig) assert response.name == "name_value" assert response.key_project == "key_project_value" + assert response.state == autokey_admin.AutokeyConfig.State.ACTIVE def test_update_autokey_config_empty_call(): @@ -1247,6 +1249,7 @@ async def test_update_autokey_config_empty_call_async(): autokey_admin.AutokeyConfig( name="name_value", key_project="key_project_value", + state=autokey_admin.AutokeyConfig.State.ACTIVE, ) ) response = await client.update_autokey_config() @@ -1320,6 +1323,7 @@ async def test_update_autokey_config_async( autokey_admin.AutokeyConfig( name="name_value", key_project="key_project_value", + state=autokey_admin.AutokeyConfig.State.ACTIVE, ) ) response = await client.update_autokey_config(request) @@ -1334,6 +1338,7 @@ async def test_update_autokey_config_async( assert isinstance(response, autokey_admin.AutokeyConfig) assert response.name == "name_value" assert response.key_project == "key_project_value" + assert response.state == autokey_admin.AutokeyConfig.State.ACTIVE @pytest.mark.asyncio @@ -1527,6 +1532,7 @@ def test_get_autokey_config(request_type, transport: str = "grpc"): call.return_value = autokey_admin.AutokeyConfig( name="name_value", 
key_project="key_project_value", + state=autokey_admin.AutokeyConfig.State.ACTIVE, ) response = client.get_autokey_config(request) @@ -1540,6 +1546,7 @@ def test_get_autokey_config(request_type, transport: str = "grpc"): assert isinstance(response, autokey_admin.AutokeyConfig) assert response.name == "name_value" assert response.key_project == "key_project_value" + assert response.state == autokey_admin.AutokeyConfig.State.ACTIVE def test_get_autokey_config_empty_call(): @@ -1650,6 +1657,7 @@ async def test_get_autokey_config_empty_call_async(): autokey_admin.AutokeyConfig( name="name_value", key_project="key_project_value", + state=autokey_admin.AutokeyConfig.State.ACTIVE, ) ) response = await client.get_autokey_config() @@ -1722,6 +1730,7 @@ async def test_get_autokey_config_async( autokey_admin.AutokeyConfig( name="name_value", key_project="key_project_value", + state=autokey_admin.AutokeyConfig.State.ACTIVE, ) ) response = await client.get_autokey_config(request) @@ -1736,6 +1745,7 @@ async def test_get_autokey_config_async( assert isinstance(response, autokey_admin.AutokeyConfig) assert response.name == "name_value" assert response.key_project == "key_project_value" + assert response.state == autokey_admin.AutokeyConfig.State.ACTIVE @pytest.mark.asyncio @@ -2301,6 +2311,7 @@ def test_update_autokey_config_rest(request_type): request_init["autokey_config"] = { "name": "folders/sample1/autokeyConfig", "key_project": "key_project_value", + "state": 1, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -2377,6 +2388,7 @@ def get_message_fields(field): return_value = autokey_admin.AutokeyConfig( name="name_value", key_project="key_project_value", + state=autokey_admin.AutokeyConfig.State.ACTIVE, ) # Wrap the value into a proper Response obj @@ -2394,6 +2406,7 @@ def get_message_fields(field): assert isinstance(response, autokey_admin.AutokeyConfig) assert response.name == "name_value" assert response.key_project == "key_project_value" + assert response.state == autokey_admin.AutokeyConfig.State.ACTIVE def test_update_autokey_config_rest_use_cached_wrapped_rpc(): @@ -2695,6 +2708,7 @@ def test_get_autokey_config_rest(request_type): return_value = autokey_admin.AutokeyConfig( name="name_value", key_project="key_project_value", + state=autokey_admin.AutokeyConfig.State.ACTIVE, ) # Wrap the value into a proper Response obj @@ -2712,6 +2726,7 @@ def test_get_autokey_config_rest(request_type): assert isinstance(response, autokey_admin.AutokeyConfig) assert response.name == "name_value" assert response.key_project == "key_project_value" + assert response.state == autokey_admin.AutokeyConfig.State.ACTIVE def test_get_autokey_config_rest_use_cached_wrapped_rpc(): From 7b4b877e9295ac6324dadf5c1b8fe06d97a49c5c Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 13:05:40 -0400 Subject: [PATCH 34/59] chore: release main (#13097) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit :robot: I have created a release *beep* *boop* ---
google-ai-generativelanguage: 0.6.10 ## [0.6.10](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.9...google-ai-generativelanguage-v0.6.10) (2024-09-23) ### Features * Add GenerationConfig.{presence_penalty, frequency_penalty, logprobs, response_logprobs, logprobs} and Candidate.{avg_logprobs, logprobs_result} ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) * Add GoogleSearchRetrieval tool and candidate.grounding_metadata ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) * Add HarmBlockThreshold.OFF ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) * Add HarmCategory.HARM_CATEGORY_CIVIC_INTEGRITY ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) * Add PredictionService (for Imagen) ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) * Add Schema.min_items ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) * Add TunedModels.reader_project_numbers ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) ### Documentation * Small fixes ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) * Tag HarmCategories by the model family they're used on. ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5))
google-analytics-data: 0.18.12 ## [0.18.12](https://github.com/googleapis/google-cloud-python/compare/google-analytics-data-v0.18.11...google-analytics-data-v0.18.12) (2024-09-23) ### Features * add `GetPropertyQuotasSnapshot` method to the Data API v1alpha ([65f098a](https://github.com/googleapis/google-cloud-python/commit/65f098a1125677c69240849703a0b97bcab7fc4c)) * add `PropertyQuotasSnapshot` type to the Data API v1alpha ([65f098a](https://github.com/googleapis/google-cloud-python/commit/65f098a1125677c69240849703a0b97bcab7fc4c)) ### Documentation * update the documentation for the `CreateReportTask` method ([65f098a](https://github.com/googleapis/google-cloud-python/commit/65f098a1125677c69240849703a0b97bcab7fc4c))
google-cloud-build: 3.25.0 ## [3.25.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-build-v3.24.2...google-cloud-build-v3.25.0) (2024-09-23) ### Features * Add LEGACY_BUCKET option to DefaultLogsBucketBehavior ([e889809](https://github.com/googleapis/google-cloud-python/commit/e889809389c5b194ec77955664eb2859cde28d73)) ### Documentation * Sanitize docs ([e889809](https://github.com/googleapis/google-cloud-python/commit/e889809389c5b194ec77955664eb2859cde28d73))
google-cloud-dialogflow: 2.32.0 ## [2.32.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-v2.31.0...google-cloud-dialogflow-v2.32.0) (2024-09-23) ### Features * created new boolean fields in conversation model for zone isolation and zone separation compliance status ([1f8b564](https://github.com/googleapis/google-cloud-python/commit/1f8b5640b0ac5397318ede4ebcfa120120ebccc8))
google-cloud-dlp: 3.23.0 ## [3.23.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dlp-v3.22.0...google-cloud-dlp-v3.23.0) (2024-09-23) ### Features * action for publishing data profiles to SecOps (formerly known as Chronicle) ([afcf7cb](https://github.com/googleapis/google-cloud-python/commit/afcf7cbe57d6e0f183a113ba03bba9c288052969)) * action for publishing data profiles to Security Command Center ([afcf7cb](https://github.com/googleapis/google-cloud-python/commit/afcf7cbe57d6e0f183a113ba03bba9c288052969)) * discovery configs for AWS S3 buckets ([afcf7cb](https://github.com/googleapis/google-cloud-python/commit/afcf7cbe57d6e0f183a113ba03bba9c288052969)) ### Documentation * small improvements and clarifications ([afcf7cb](https://github.com/googleapis/google-cloud-python/commit/afcf7cbe57d6e0f183a113ba03bba9c288052969))
google-cloud-gdchardwaremanagement: 0.1.4 ## [0.1.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gdchardwaremanagement-v0.1.3...google-cloud-gdchardwaremanagement-v0.1.4) (2024-09-23) ### Features * add an order type field to distinguish a fulfillment request from a sales inquiry ([e727cc0](https://github.com/googleapis/google-cloud-python/commit/e727cc0e98e37d55882215182f86c2a7d23154ef)) * add support to mark comments as read or unread ([e727cc0](https://github.com/googleapis/google-cloud-python/commit/e727cc0e98e37d55882215182f86c2a7d23154ef)) * rename zone state signal READY_FOR_SITE_TURNUP to FACTORY_TURNUP_CHECKS_PASSED ([e727cc0](https://github.com/googleapis/google-cloud-python/commit/e727cc0e98e37d55882215182f86c2a7d23154ef)) ### Documentation * clarify how access_times are used ([e727cc0](https://github.com/googleapis/google-cloud-python/commit/e727cc0e98e37d55882215182f86c2a7d23154ef))
google-cloud-kms: 3.0.0 ## [3.0.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-kms-v2.24.2...google-cloud-kms-v3.0.0) (2024-09-23) ### ⚠ BREAKING CHANGES * Pagination feature is introduced for method ListKeyHandles in service Autokey ### Features * Adding a state field for AutokeyConfig ([b4c9770](https://github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) ### Bug Fixes * Pagination feature is introduced for method ListKeyHandles in service Autokey ([b4c9770](https://github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) ### Documentation * A comment for field destroy_scheduled_duration in message .google.cloud.kms.v1.CryptoKey is updated for the default duration ([b4c9770](https://github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) * Field service_resolvers in message .google.cloud.kms.v1.EkmConnection is explicitly marked as having field behavior of Optional ([b4c9770](https://github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4))
google-maps-places: 0.1.18 ## [0.1.18](https://github.com/googleapis/google-cloud-python/compare/google-maps-places-v0.1.17...google-maps-places-v0.1.18) (2024-09-23) ### Features * action for publishing data profiles to SecOps (formerly known as Chronicle) ([2cc1550](https://github.com/googleapis/google-cloud-python/commit/2cc1550492a2b78ed7240aab84a8449de5e5afa2)) * action for publishing data profiles to Security Command Center ([2cc1550](https://github.com/googleapis/google-cloud-python/commit/2cc1550492a2b78ed7240aab84a8449de5e5afa2)) * discovery configs for AWS S3 buckets ([2cc1550](https://github.com/googleapis/google-cloud-python/commit/2cc1550492a2b78ed7240aab84a8449de5e5afa2)) ### Documentation * small improvements and clarifications ([2cc1550](https://github.com/googleapis/google-cloud-python/commit/2cc1550492a2b78ed7240aab84a8449de5e5afa2))
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 16 +++++++------- .../google-ai-generativelanguage/CHANGELOG.md | 19 ++++++++++++++++ .../ai/generativelanguage/gapic_version.py | 2 +- .../ai/generativelanguage_v1/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../gapic_version.py | 2 +- ...adata_google.ai.generativelanguage.v1.json | 2 +- ...a_google.ai.generativelanguage.v1beta.json | 2 +- ..._google.ai.generativelanguage.v1beta2.json | 2 +- ..._google.ai.generativelanguage.v1beta3.json | 2 +- packages/google-analytics-data/CHANGELOG.md | 13 +++++++++++ .../google/analytics/data/gapic_version.py | 2 +- .../analytics/data_v1alpha/gapic_version.py | 2 +- .../analytics/data_v1beta/gapic_version.py | 2 +- ...etadata_google.analytics.data.v1alpha.json | 2 +- ...metadata_google.analytics.data.v1beta.json | 2 +- packages/google-cloud-build/CHANGELOG.md | 12 ++++++++++ .../devtools/cloudbuild/gapic_version.py | 2 +- .../devtools/cloudbuild_v1/gapic_version.py | 2 +- .../devtools/cloudbuild_v2/gapic_version.py | 2 +- ...etadata_google.devtools.cloudbuild.v1.json | 2 +- ...etadata_google.devtools.cloudbuild.v2.json | 2 +- packages/google-cloud-dialogflow/CHANGELOG.md | 7 ++++++ .../google/cloud/dialogflow/gapic_version.py | 2 +- .../cloud/dialogflow_v2/gapic_version.py | 2 +- .../cloud/dialogflow_v2beta1/gapic_version.py | 2 +- ...t_metadata_google.cloud.dialogflow.v2.json | 2 +- ...adata_google.cloud.dialogflow.v2beta1.json | 2 +- packages/google-cloud-dlp/CHANGELOG.md | 14 ++++++++++++ .../google/cloud/dlp/gapic_version.py | 2 +- .../google/cloud/dlp_v2/gapic_version.py | 2 +- ...nippet_metadata_google.privacy.dlp.v2.json | 2 +- .../CHANGELOG.md | 14 ++++++++++++ 
.../gdchardwaremanagement/gapic_version.py | 2 +- .../gapic_version.py | 2 +- ...e.cloud.gdchardwaremanagement.v1alpha.json | 2 +- packages/google-cloud-kms/CHANGELOG.md | 22 +++++++++++++++++++ .../google/cloud/kms/gapic_version.py | 2 +- .../google/cloud/kms_v1/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.kms.v1.json | 2 +- packages/google-maps-places/CHANGELOG.md | 14 ++++++++++++ .../google/maps/places/gapic_version.py | 2 +- .../google/maps/places_v1/gapic_version.py | 2 +- ...nippet_metadata_google.maps.places.v1.json | 2 +- 45 files changed, 159 insertions(+), 44 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 19f3bed603ce..9b7f01b6663b 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,9 +1,9 @@ { "packages/google-ads-admanager": "0.1.2", "packages/google-ads-marketingplatform-admin": "0.1.0", - "packages/google-ai-generativelanguage": "0.6.9", + "packages/google-ai-generativelanguage": "0.6.10", "packages/google-analytics-admin": "0.23.0", - "packages/google-analytics-data": "0.18.11", + "packages/google-analytics-data": "0.18.12", "packages/google-apps-card": "0.1.4", "packages/google-apps-chat": "0.1.11", "packages/google-apps-events-subscriptions": "0.1.2", @@ -46,7 +46,7 @@ "packages/google-cloud-billing": "1.13.6", "packages/google-cloud-billing-budgets": "1.14.5", "packages/google-cloud-binary-authorization": "1.10.5", - "packages/google-cloud-build": "3.24.2", + "packages/google-cloud-build": "3.25.0", "packages/google-cloud-certificate-manager": "1.7.2", "packages/google-cloud-channel": "1.18.5", "packages/google-cloud-cloudcontrolspartner": "0.2.0", @@ -73,10 +73,10 @@ "packages/google-cloud-datastream": "1.9.5", "packages/google-cloud-deploy": "2.0.1", "packages/google-cloud-developerconnect": "0.1.2", - "packages/google-cloud-dialogflow": "2.31.0", + "packages/google-cloud-dialogflow": "2.32.0", "packages/google-cloud-dialogflow-cx": "1.35.0", 
"packages/google-cloud-discoveryengine": "0.12.2", - "packages/google-cloud-dlp": "3.22.0", + "packages/google-cloud-dlp": "3.23.0", "packages/google-cloud-dms": "1.9.5", "packages/google-cloud-documentai": "2.32.0", "packages/google-cloud-domains": "1.7.5", @@ -88,7 +88,7 @@ "packages/google-cloud-eventarc-publishing": "0.6.11", "packages/google-cloud-filestore": "1.9.5", "packages/google-cloud-functions": "1.17.0", - "packages/google-cloud-gdchardwaremanagement": "0.1.3", + "packages/google-cloud-gdchardwaremanagement": "0.1.4", "packages/google-cloud-gke-backup": "0.5.11", "packages/google-cloud-gke-connect-gateway": "0.9.0", "packages/google-cloud-gke-hub": "1.14.2", @@ -98,7 +98,7 @@ "packages/google-cloud-iam-logging": "1.3.5", "packages/google-cloud-iap": "1.13.5", "packages/google-cloud-ids": "1.7.5", - "packages/google-cloud-kms": "2.24.2", + "packages/google-cloud-kms": "3.0.0", "packages/google-cloud-kms-inventory": "0.2.8", "packages/google-cloud-language": "2.14.0", "packages/google-cloud-life-sciences": "0.9.12", @@ -182,7 +182,7 @@ "packages/google-maps-fleetengine": "0.2.2", "packages/google-maps-fleetengine-delivery": "0.2.4", "packages/google-maps-mapsplatformdatasets": "0.4.2", - "packages/google-maps-places": "0.1.17", + "packages/google-maps-places": "0.1.18", "packages/google-maps-routeoptimization": "0.1.3", "packages/google-maps-routing": "0.6.10", "packages/google-maps-solar": "0.1.2", diff --git a/packages/google-ai-generativelanguage/CHANGELOG.md b/packages/google-ai-generativelanguage/CHANGELOG.md index d3b8538f00c3..c6fa336083ab 100644 --- a/packages/google-ai-generativelanguage/CHANGELOG.md +++ b/packages/google-ai-generativelanguage/CHANGELOG.md @@ -1,5 +1,24 @@ # Changelog +## [0.6.10](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.9...google-ai-generativelanguage-v0.6.10) (2024-09-23) + + +### Features + +* Add GenerationConfig.{presence_penalty, frequency_penalty, logprobs, 
response_logprobs, logprobs} and Candidate.{avg_logprobs, logprobs_result} ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add GoogleSearchRetrieval tool and candidate.grounding_metadata ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add HarmBlockThreshold.OFF ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add HarmCategory.HARM_CATEGORY_CIVIC_INTEGRITY ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add PredictionService (for Imagen) ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add Schema.min_items ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add TunedModels.reader_project_numbers ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) + + +### Documentation + +* Small fixes ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Tag HarmCategories by the model family they're used on. 
([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) + ## [0.6.9](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.8...google-ai-generativelanguage-v0.6.9) (2024-08-19) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py index 558c8aab67c5..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py index 558c8aab67c5..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py index 558c8aab67c5..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py index 558c8aab67c5..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py index 558c8aab67c5..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json index d6c3fe4c5051..416353581730 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.1.0" + "version": "0.6.10" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json index c418dfa10386..a2110fd118ef 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.1.0" + "version": "0.6.10" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json index 5b7d0a0509b4..865de14ffa13 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json +++ 
b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.1.0" + "version": "0.6.10" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json index 91de9e353f90..7fbde27c9197 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.1.0" + "version": "0.6.10" }, "snippets": [ { diff --git a/packages/google-analytics-data/CHANGELOG.md b/packages/google-analytics-data/CHANGELOG.md index ad685dbf13a6..f863e9dda9cd 100644 --- a/packages/google-analytics-data/CHANGELOG.md +++ b/packages/google-analytics-data/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## [0.18.12](https://github.com/googleapis/google-cloud-python/compare/google-analytics-data-v0.18.11...google-analytics-data-v0.18.12) (2024-09-23) + + +### Features + +* add `GetPropertyQuotasSnapshot` method to the Data API v1alpha ([65f098a](https://github.com/googleapis/google-cloud-python/commit/65f098a1125677c69240849703a0b97bcab7fc4c)) +* add `PropertyQuotasSnapshot` type to the Data API v1alpha ([65f098a](https://github.com/googleapis/google-cloud-python/commit/65f098a1125677c69240849703a0b97bcab7fc4c)) + + +### Documentation + +* update the documentation for the `CreateReportTask` method ([65f098a](https://github.com/googleapis/google-cloud-python/commit/65f098a1125677c69240849703a0b97bcab7fc4c)) + ## 
[0.18.11](https://github.com/googleapis/google-cloud-python/compare/google-analytics-data-v0.18.10...google-analytics-data-v0.18.11) (2024-08-08) diff --git a/packages/google-analytics-data/google/analytics/data/gapic_version.py b/packages/google-analytics-data/google/analytics/data/gapic_version.py index 558c8aab67c5..48dad06a30d5 100644 --- a/packages/google-analytics-data/google/analytics/data/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.18.12" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py index 558c8aab67c5..48dad06a30d5 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.18.12" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py b/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py index 558c8aab67c5..48dad06a30d5 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.18.12" # {x-release-please-version} diff --git a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json index 15f8d0d6e6d8..d5b5816eb6e5 100644 --- a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json +++ b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-data", - "version": "0.1.0" + "version": "0.18.12" }, "snippets": [ { diff --git a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json index 29e86a085403..753d0fcebd81 100644 --- a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json +++ b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-data", - "version": "0.1.0" + "version": "0.18.12" }, "snippets": [ { diff --git a/packages/google-cloud-build/CHANGELOG.md b/packages/google-cloud-build/CHANGELOG.md index 619b570d55f6..fb07ad800441 100644 --- a/packages/google-cloud-build/CHANGELOG.md +++ b/packages/google-cloud-build/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-build/#history +## [3.25.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-build-v3.24.2...google-cloud-build-v3.25.0) (2024-09-23) + + +### Features + +* Add LEGACY_BUCKET option to DefaultLogsBucketBehavior 
([e889809](https://github.com/googleapis/google-cloud-python/commit/e889809389c5b194ec77955664eb2859cde28d73)) + + +### Documentation + +* Sanitize docs ([e889809](https://github.com/googleapis/google-cloud-python/commit/e889809389c5b194ec77955664eb2859cde28d73)) + ## [3.24.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-build-v3.24.1...google-cloud-build-v3.24.2) (2024-07-30) diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py index 558c8aab67c5..8adcea73e25d 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.25.0" # {x-release-please-version} diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py index 558c8aab67c5..8adcea73e25d 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.25.0" # {x-release-please-version} diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py index 558c8aab67c5..8adcea73e25d 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.25.0" # {x-release-please-version} diff --git a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json index e379efab560f..66e42a84ba95 100644 --- a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json +++ b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-build", - "version": "0.1.0" + "version": "3.25.0" }, "snippets": [ { diff --git a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json index 818d3fc2029c..f4891e033575 100644 --- a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json +++ b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-build", - "version": "0.1.0" + "version": "3.25.0" }, "snippets": [ { diff --git 
a/packages/google-cloud-dialogflow/CHANGELOG.md b/packages/google-cloud-dialogflow/CHANGELOG.md index a2dbd7ce4862..cf54bc09dc6d 100644 --- a/packages/google-cloud-dialogflow/CHANGELOG.md +++ b/packages/google-cloud-dialogflow/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/dialogflow/#history +## [2.32.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-v2.31.0...google-cloud-dialogflow-v2.32.0) (2024-09-23) + + +### Features + +* created new boolean fields in conversation model for zone isolation and zone separation compliance status ([1f8b564](https://github.com/googleapis/google-cloud-python/commit/1f8b5640b0ac5397318ede4ebcfa120120ebccc8)) + ## [2.31.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-v2.30.2...google-cloud-dialogflow-v2.31.0) (2024-08-08) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py index 558c8aab67c5..c82b1e137507 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py index 558c8aab67c5..c82b1e137507 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py index 558c8aab67c5..c82b1e137507 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json index dde14d384e60..7e99cd1321e2 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json +++ b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow", - "version": "0.1.0" + "version": "2.32.0" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json index 58a96bc185e8..a9752b2203e1 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json +++ b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow", - "version": "0.1.0" + "version": 
"2.32.0" }, "snippets": [ { diff --git a/packages/google-cloud-dlp/CHANGELOG.md b/packages/google-cloud-dlp/CHANGELOG.md index 589d11fd9bed..fa617bb88d88 100644 --- a/packages/google-cloud-dlp/CHANGELOG.md +++ b/packages/google-cloud-dlp/CHANGELOG.md @@ -4,6 +4,20 @@ [1]: https://pypi.org/project/google-cloud-dlp/#history +## [3.23.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dlp-v3.22.0...google-cloud-dlp-v3.23.0) (2024-09-23) + + +### Features + +* action for publishing data profiles to SecOps (formelly known as Chronicle) ([afcf7cb](https://github.com/googleapis/google-cloud-python/commit/afcf7cbe57d6e0f183a113ba03bba9c288052969)) +* action for publishing data profiles to Security Command Center ([afcf7cb](https://github.com/googleapis/google-cloud-python/commit/afcf7cbe57d6e0f183a113ba03bba9c288052969)) +* discovery configs for AWS S3 buckets ([afcf7cb](https://github.com/googleapis/google-cloud-python/commit/afcf7cbe57d6e0f183a113ba03bba9c288052969)) + + +### Documentation + +* small improvements and clarifications ([afcf7cb](https://github.com/googleapis/google-cloud-python/commit/afcf7cbe57d6e0f183a113ba03bba9c288052969)) + ## [3.22.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dlp-v3.21.0...google-cloud-dlp-v3.22.0) (2024-08-19) diff --git a/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py b/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py index 558c8aab67c5..9304602da4e3 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py +++ b/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.23.0" # {x-release-please-version} diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py index 558c8aab67c5..9304602da4e3 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.23.0" # {x-release-please-version} diff --git a/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json b/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json index 4da85d5c6cd9..0555761e7f11 100644 --- a/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json +++ b/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dlp", - "version": "0.1.0" + "version": "3.23.0" }, "snippets": [ { diff --git a/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md b/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md index c00b0d36bea8..08b9cafd12e1 100644 --- a/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md +++ b/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [0.1.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gdchardwaremanagement-v0.1.3...google-cloud-gdchardwaremanagement-v0.1.4) (2024-09-23) + + +### Features + +* add an order type field to distinguish a fulfillment request from a sales inquiry ([e727cc0](https://github.com/googleapis/google-cloud-python/commit/e727cc0e98e37d55882215182f86c2a7d23154ef)) +* add support to mark comments as read or unread 
([e727cc0](https://github.com/googleapis/google-cloud-python/commit/e727cc0e98e37d55882215182f86c2a7d23154ef)) +* rename zone state signal READY_FOR_SITE_TURNUP to FACTORY_TURNUP_CHECKS_PASSED ([e727cc0](https://github.com/googleapis/google-cloud-python/commit/e727cc0e98e37d55882215182f86c2a7d23154ef)) + + +### Documentation + +* clarify how access_times are used ([e727cc0](https://github.com/googleapis/google-cloud-python/commit/e727cc0e98e37d55882215182f86c2a7d23154ef)) + ## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gdchardwaremanagement-v0.1.2...google-cloud-gdchardwaremanagement-v0.1.3) (2024-07-30) diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/gapic_version.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/gapic_version.py index 558c8aab67c5..937ede8823ef 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/gapic_version.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py index 558c8aab67c5..937ede8823ef 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json index 588b33c7fb6e..d6af9346263f 100644 --- a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gdchardwaremanagement", - "version": "0.1.0" + "version": "0.1.4" }, "snippets": [ { diff --git a/packages/google-cloud-kms/CHANGELOG.md b/packages/google-cloud-kms/CHANGELOG.md index e46b3a3ebd63..22ad6d9a6bcf 100644 --- a/packages/google-cloud-kms/CHANGELOG.md +++ b/packages/google-cloud-kms/CHANGELOG.md @@ -4,6 +4,28 @@ [1]: https://pypi.org/project/google-cloud-kms/#history +## [3.0.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-kms-v2.24.2...google-cloud-kms-v3.0.0) (2024-09-23) + + +### ⚠ BREAKING CHANGES + +* Pagination feature is introduced for method ListKeyHandles in service Autokey + +### Features + +* Adding a state field for AutokeyConfig ([b4c9770](https://github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) + + +### Bug Fixes + +* Pagination feature is introduced for method ListKeyHandles in service Autokey ([b4c9770](https://github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) + + +### Documentation + +* A comment for field destroy_scheduled_duration in message .google.cloud.kms.v1.CryptoKey is updated for the default duration 
([b4c9770](https://github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) +* Field service_resolvers in message .google.cloud.kms.v1.EkmConnection is Explicitly is marked as to have field behavior of Optional ([b4c9770](https://github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) + ## [2.24.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-kms-v2.24.1...google-cloud-kms-v2.24.2) (2024-07-30) diff --git a/packages/google-cloud-kms/google/cloud/kms/gapic_version.py b/packages/google-cloud-kms/google/cloud/kms/gapic_version.py index 558c8aab67c5..b657023d6068 100644 --- a/packages/google-cloud-kms/google/cloud/kms/gapic_version.py +++ b/packages/google-cloud-kms/google/cloud/kms/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py b/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py index 558c8aab67c5..b657023d6068 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json b/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json index a5e165bff4c4..1630512d4e6c 100644 --- a/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json +++ b/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-kms", - "version": "0.1.0" + "version": "3.0.0" }, "snippets": [ { diff --git a/packages/google-maps-places/CHANGELOG.md b/packages/google-maps-places/CHANGELOG.md index be0f7fb9c67e..7bb089f1c273 100644 --- a/packages/google-maps-places/CHANGELOG.md +++ b/packages/google-maps-places/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [0.1.18](https://github.com/googleapis/google-cloud-python/compare/google-maps-places-v0.1.17...google-maps-places-v0.1.18) (2024-09-23) + + +### Features + +* action for publishing data profiles to SecOps (formelly known as Chronicle) ([2cc1550](https://github.com/googleapis/google-cloud-python/commit/2cc1550492a2b78ed7240aab84a8449de5e5afa2)) +* action for publishing data profiles to Security Command Center ([2cc1550](https://github.com/googleapis/google-cloud-python/commit/2cc1550492a2b78ed7240aab84a8449de5e5afa2)) +* discovery configs for AWS S3 buckets ([2cc1550](https://github.com/googleapis/google-cloud-python/commit/2cc1550492a2b78ed7240aab84a8449de5e5afa2)) + + +### Documentation + +* small improvements and clarifications ([2cc1550](https://github.com/googleapis/google-cloud-python/commit/2cc1550492a2b78ed7240aab84a8449de5e5afa2)) + ## [0.1.17](https://github.com/googleapis/google-cloud-python/compare/google-maps-places-v0.1.16...google-maps-places-v0.1.17) (2024-07-30) diff --git 
a/packages/google-maps-places/google/maps/places/gapic_version.py b/packages/google-maps-places/google/maps/places/gapic_version.py index 558c8aab67c5..3c51a1157647 100644 --- a/packages/google-maps-places/google/maps/places/gapic_version.py +++ b/packages/google-maps-places/google/maps/places/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.18" # {x-release-please-version} diff --git a/packages/google-maps-places/google/maps/places_v1/gapic_version.py b/packages/google-maps-places/google/maps/places_v1/gapic_version.py index 558c8aab67c5..3c51a1157647 100644 --- a/packages/google-maps-places/google/maps/places_v1/gapic_version.py +++ b/packages/google-maps-places/google/maps/places_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.18" # {x-release-please-version} diff --git a/packages/google-maps-places/samples/generated_samples/snippet_metadata_google.maps.places.v1.json b/packages/google-maps-places/samples/generated_samples/snippet_metadata_google.maps.places.v1.json index 74eb224b30a6..e9caf90c3929 100644 --- a/packages/google-maps-places/samples/generated_samples/snippet_metadata_google.maps.places.v1.json +++ b/packages/google-maps-places/samples/generated_samples/snippet_metadata_google.maps.places.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-places", - "version": "0.1.0" + "version": "0.1.18" }, "snippets": [ { From c638f1f55a85a228ec6385095ca1befb54067188 Mon Sep 17 00:00:00 2001 From: "owlbot-bootstrapper[bot]" <104649659+owlbot-bootstrapper[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 17:14:28 -0400 Subject: [PATCH 35/59] feat: add initial files for google.cloud.oracledatabase.v1 (#13100) 
Source-Link: https://github.com/googleapis/googleapis-gen/commit/09d68f35365c74ad276cea3e7c26553a1485faa0 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW9yYWNsZWRhdGFiYXNlLy5Pd2xCb3QueWFtbCIsImgiOiIwOWQ2OGYzNTM2NWM3NGFkMjc2Y2VhM2U3YzI2NTUzYTE0ODVmYWEwIn0= PiperOrigin-RevId: 367526014 --------- Co-authored-by: Owlbot Bootstrapper Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google-cloud-oracledatabase/.OwlBot.yaml | 18 + .../google-cloud-oracledatabase/.coveragerc | 13 + packages/google-cloud-oracledatabase/.flake8 | 33 + .../google-cloud-oracledatabase/.gitignore | 63 + .../.repo-metadata.json | 17 + .../google-cloud-oracledatabase/CHANGELOG.md | 1 + .../CODE_OF_CONDUCT.md | 95 + .../CONTRIBUTING.rst | 271 + packages/google-cloud-oracledatabase/LICENSE | 202 + .../google-cloud-oracledatabase/MANIFEST.in | 25 + .../google-cloud-oracledatabase/README.rst | 108 + .../docs/CHANGELOG.md | 1 + .../docs/README.rst | 1 + .../docs/_static/custom.css | 20 + .../docs/_templates/layout.html | 50 + .../google-cloud-oracledatabase/docs/conf.py | 384 + .../docs/index.rst | 28 + .../docs/multiprocessing.rst | 7 + .../oracledatabase_v1/oracle_database.rst | 10 + .../docs/oracledatabase_v1/services_.rst | 6 + .../docs/oracledatabase_v1/types_.rst | 6 + .../docs/summary_overview.md | 22 + .../google/cloud/oracledatabase/__init__.py | 177 + .../cloud/oracledatabase/gapic_version.py | 16 + .../google/cloud/oracledatabase/py.typed | 2 + .../cloud/oracledatabase_v1/__init__.py | 168 + .../oracledatabase_v1/gapic_metadata.json | 128 + .../cloud/oracledatabase_v1/gapic_version.py | 16 + .../google/cloud/oracledatabase_v1/py.typed | 2 + .../oracledatabase_v1/services/__init__.py | 15 + .../services/oracle_database/__init__.py | 18 + .../services/oracle_database/client.py | 4073 ++++++ .../services/oracle_database/pagers.py | 876 ++ .../oracle_database/transports/__init__.py | 30 + .../oracle_database/transports/base.py | 731 ++ .../oracle_database/transports/rest.py | 3718 ++++++ 
.../cloud/oracledatabase_v1/types/__init__.py | 157 + .../types/autonomous_database.py | 1421 +++ .../autonomous_database_character_set.py | 78 + .../types/autonomous_db_backup.py | 289 + .../types/autonomous_db_version.py | 71 + .../cloud/oracledatabase_v1/types/common.py | 47 + .../cloud/oracledatabase_v1/types/db_node.py | 158 + .../oracledatabase_v1/types/db_server.py | 163 + .../types/db_system_shape.py | 117 + .../oracledatabase_v1/types/entitlement.py | 127 + .../oracledatabase_v1/types/exadata_infra.py | 468 + .../oracledatabase_v1/types/gi_version.py | 54 + .../types/location_metadata.py | 45 + .../oracledatabase_v1/types/oracledatabase.py | 1244 ++ .../oracledatabase_v1/types/vm_cluster.py | 437 + packages/google-cloud-oracledatabase/mypy.ini | 3 + .../google-cloud-oracledatabase/noxfile.py | 452 + ...atabase_create_autonomous_database_sync.py | 62 + ...reate_cloud_exadata_infrastructure_sync.py | 57 + ...e_database_create_cloud_vm_cluster_sync.py | 64 + ...atabase_delete_autonomous_database_sync.py | 56 + ...elete_cloud_exadata_infrastructure_sync.py | 56 + ...e_database_delete_cloud_vm_cluster_sync.py | 56 + ...enerate_autonomous_database_wallet_sync.py | 53 + ...e_database_get_autonomous_database_sync.py | 52 + ...e_get_cloud_exadata_infrastructure_sync.py | 52 + ...acle_database_get_cloud_vm_cluster_sync.py | 52 + ...e_list_autonomous_database_backups_sync.py | 53 + ...autonomous_database_character_sets_sync.py | 53 + ...database_list_autonomous_databases_sync.py | 53 + ...tabase_list_autonomous_db_versions_sync.py | 53 + ...list_cloud_exadata_infrastructures_sync.py | 53 + ...le_database_list_cloud_vm_clusters_sync.py | 53 + ...ated_oracle_database_list_db_nodes_sync.py | 53 + ...ed_oracle_database_list_db_servers_sync.py | 53 + ...cle_database_list_db_system_shapes_sync.py | 53 + ..._oracle_database_list_entitlements_sync.py | 53 + ...d_oracle_database_list_gi_versions_sync.py | 53 + ...tabase_restore_autonomous_database_sync.py | 56 + 
...tadata_google.cloud.oracledatabase.v1.json | 1815 +++ .../scripts/decrypt-secrets.sh | 46 + .../fixup_oracledatabase_v1_keywords.py | 197 + packages/google-cloud-oracledatabase/setup.py | 95 + .../testing/.gitignore | 3 + .../testing/constraints-3.10.txt | 6 + .../testing/constraints-3.11.txt | 6 + .../testing/constraints-3.12.txt | 6 + .../testing/constraints-3.7.txt | 10 + .../testing/constraints-3.8.txt | 6 + .../testing/constraints-3.9.txt | 6 + .../tests/__init__.py | 15 + .../tests/unit/__init__.py | 15 + .../tests/unit/gapic/__init__.py | 15 + .../unit/gapic/oracledatabase_v1/__init__.py | 15 + .../oracledatabase_v1/test_oracle_database.py | 10589 ++++++++++++++++ 91 files changed, 30726 insertions(+) create mode 100644 packages/google-cloud-oracledatabase/.OwlBot.yaml create mode 100644 packages/google-cloud-oracledatabase/.coveragerc create mode 100644 packages/google-cloud-oracledatabase/.flake8 create mode 100644 packages/google-cloud-oracledatabase/.gitignore create mode 100644 packages/google-cloud-oracledatabase/.repo-metadata.json create mode 100644 packages/google-cloud-oracledatabase/CHANGELOG.md create mode 100644 packages/google-cloud-oracledatabase/CODE_OF_CONDUCT.md create mode 100644 packages/google-cloud-oracledatabase/CONTRIBUTING.rst create mode 100644 packages/google-cloud-oracledatabase/LICENSE create mode 100644 packages/google-cloud-oracledatabase/MANIFEST.in create mode 100644 packages/google-cloud-oracledatabase/README.rst create mode 120000 packages/google-cloud-oracledatabase/docs/CHANGELOG.md create mode 120000 packages/google-cloud-oracledatabase/docs/README.rst create mode 100644 packages/google-cloud-oracledatabase/docs/_static/custom.css create mode 100644 packages/google-cloud-oracledatabase/docs/_templates/layout.html create mode 100644 packages/google-cloud-oracledatabase/docs/conf.py create mode 100644 packages/google-cloud-oracledatabase/docs/index.rst create mode 100644 
packages/google-cloud-oracledatabase/docs/multiprocessing.rst create mode 100644 packages/google-cloud-oracledatabase/docs/oracledatabase_v1/oracle_database.rst create mode 100644 packages/google-cloud-oracledatabase/docs/oracledatabase_v1/services_.rst create mode 100644 packages/google-cloud-oracledatabase/docs/oracledatabase_v1/types_.rst create mode 100644 packages/google-cloud-oracledatabase/docs/summary_overview.md create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase/__init__.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase/py.typed create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/__init__.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_metadata.json create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/py.typed create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/__init__.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/client.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py 
create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/__init__.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_version.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/common.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_node.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_server.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_system_shape.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/entitlement.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/exadata_infra.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/gi_version.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/location_metadata.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/oracledatabase.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/vm_cluster.py create mode 100644 packages/google-cloud-oracledatabase/mypy.ini create mode 100644 packages/google-cloud-oracledatabase/noxfile.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py create mode 100644 
packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py create mode 100644 
packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json create mode 100755 packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh create mode 100644 packages/google-cloud-oracledatabase/scripts/fixup_oracledatabase_v1_keywords.py create mode 100644 packages/google-cloud-oracledatabase/setup.py create mode 100644 packages/google-cloud-oracledatabase/testing/.gitignore create mode 100644 packages/google-cloud-oracledatabase/testing/constraints-3.10.txt create mode 100644 
packages/google-cloud-oracledatabase/testing/constraints-3.11.txt create mode 100644 packages/google-cloud-oracledatabase/testing/constraints-3.12.txt create mode 100644 packages/google-cloud-oracledatabase/testing/constraints-3.7.txt create mode 100644 packages/google-cloud-oracledatabase/testing/constraints-3.8.txt create mode 100644 packages/google-cloud-oracledatabase/testing/constraints-3.9.txt create mode 100644 packages/google-cloud-oracledatabase/tests/__init__.py create mode 100644 packages/google-cloud-oracledatabase/tests/unit/__init__.py create mode 100644 packages/google-cloud-oracledatabase/tests/unit/gapic/__init__.py create mode 100644 packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/__init__.py create mode 100644 packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py diff --git a/packages/google-cloud-oracledatabase/.OwlBot.yaml b/packages/google-cloud-oracledatabase/.OwlBot.yaml new file mode 100644 index 000000000000..ebf74202aadf --- /dev/null +++ b/packages/google-cloud-oracledatabase/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +deep-copy-regex: + - source: /google/cloud/oracledatabase/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-oracledatabase/$1 +api-name: google-cloud-oracledatabase diff --git a/packages/google-cloud-oracledatabase/.coveragerc b/packages/google-cloud-oracledatabase/.coveragerc new file mode 100644 index 000000000000..645dc04f340e --- /dev/null +++ b/packages/google-cloud-oracledatabase/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/oracledatabase/__init__.py + google/cloud/oracledatabase/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-oracledatabase/.flake8 b/packages/google-cloud-oracledatabase/.flake8 new file mode 100644 index 000000000000..32986c79287a --- /dev/null +++ b/packages/google-cloud-oracledatabase/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-oracledatabase/.gitignore b/packages/google-cloud-oracledatabase/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-oracledatabase/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-oracledatabase/.repo-metadata.json b/packages/google-cloud-oracledatabase/.repo-metadata.json new file mode 100644 index 000000000000..6d1b2164a92f --- /dev/null +++ b/packages/google-cloud-oracledatabase/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-cloud-oracledatabase", + "name_pretty": "Oracle Database@Google Cloud API", + "api_description": "The Oracle Database@Google Cloud API provides a set of APIs to manage Oracle database services, such as Exadata and Autonomous Databases.", + "product_documentation": "https://cloud.google.com/oracle/database/docs", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1492565", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-oracledatabase", + "api_id": 
"oracledatabase.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "oracledatabase" +} diff --git a/packages/google-cloud-oracledatabase/CHANGELOG.md b/packages/google-cloud-oracledatabase/CHANGELOG.md new file mode 100644 index 000000000000..5ddad421e08f --- /dev/null +++ b/packages/google-cloud-oracledatabase/CHANGELOG.md @@ -0,0 +1 @@ +# Changelog \ No newline at end of file diff --git a/packages/google-cloud-oracledatabase/CODE_OF_CONDUCT.md b/packages/google-cloud-oracledatabase/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-oracledatabase/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. 
In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-oracledatabase/CONTRIBUTING.rst b/packages/google-cloud-oracledatabase/CONTRIBUTING.rst new file mode 100644 index 000000000000..9b24d1115e7f --- /dev/null +++ b/packages/google-cloud-oracledatabase/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. 
+ +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. 
Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. 
Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. 
``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-oracledatabase + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-oracledatabase/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. 
diff --git a/packages/google-cloud-oracledatabase/LICENSE b/packages/google-cloud-oracledatabase/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-oracledatabase/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-oracledatabase/MANIFEST.in b/packages/google-cloud-oracledatabase/MANIFEST.in new file mode 100644 index 000000000000..d6814cd60037 --- /dev/null +++ b/packages/google-cloud-oracledatabase/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-oracledatabase/README.rst b/packages/google-cloud-oracledatabase/README.rst new file mode 100644 index 000000000000..c050103f1ecd --- /dev/null +++ b/packages/google-cloud-oracledatabase/README.rst @@ -0,0 +1,108 @@ +Python Client for Oracle Database@Google Cloud API +================================================== + +|preview| |pypi| |versions| + +`Oracle Database@Google Cloud API`_: The Oracle Database@Google Cloud API provides a set of APIs to manage Oracle database services, such as Exadata and Autonomous Databases. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-oracledatabase.svg + :target: https://pypi.org/project/google-cloud-oracledatabase/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-oracledatabase.svg + :target: https://pypi.org/project/google-cloud-oracledatabase/ +.. _Oracle Database@Google Cloud API: https://cloud.google.com/oracle/database/docs +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest/summary_overview +.. _Product Documentation: https://cloud.google.com/oracle/database/docs + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. 
`Enable the Oracle Database@Google Cloud API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Oracle Database@Google Cloud API.: https://cloud.google.com/oracle/database/docs +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-oracledatabase/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. 
code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + pip install google-cloud-oracledatabase + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv <your-env> + .\<your-env>\Scripts\activate + pip install google-cloud-oracledatabase + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Oracle Database@Google Cloud API + to see other available methods on the client. +- Read the `Oracle Database@Google Cloud API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Oracle Database@Google Cloud API Product documentation: https://cloud.google.com/oracle/database/docs +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-oracledatabase/docs/CHANGELOG.md b/packages/google-cloud-oracledatabase/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-oracledatabase/docs/README.rst b/packages/google-cloud-oracledatabase/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-oracledatabase/docs/_static/custom.css b/packages/google-cloud-oracledatabase/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert 
empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-oracledatabase/docs/_templates/layout.html b/packages/google-cloud-oracledatabase/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-oracledatabase/docs/conf.py b/packages/google-cloud-oracledatabase/docs/conf.py new file mode 100644 index 000000000000..a4b21f79d825 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-oracledatabase documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. 
+needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-oracledatabase" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. 
+# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-oracledatabase", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. 
+# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-oracledatabase-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. 
+ # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-oracledatabase.tex", + "google-cloud-oracledatabase Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-oracledatabase", + "google-cloud-oracledatabase Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-oracledatabase", + "google-cloud-oracledatabase Documentation", + author, + "google-cloud-oracledatabase", + "google-cloud-oracledatabase Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-oracledatabase/docs/index.rst b/packages/google-cloud-oracledatabase/docs/index.rst new file mode 100644 index 000000000000..77ff04e09fa0 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/index.rst @@ -0,0 +1,28 @@ +.. include:: README.rst + +.. 
include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + oracledatabase_v1/services_ + oracledatabase_v1/types_ + + +Changelog +--------- + +For a list of all ``google-cloud-oracledatabase`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-oracledatabase/docs/multiprocessing.rst b/packages/google-cloud-oracledatabase/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/oracle_database.rst b/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/oracle_database.rst new file mode 100644 index 000000000000..ef9ce591ce83 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/oracle_database.rst @@ -0,0 +1,10 @@ +OracleDatabase +-------------------------------- + +.. automodule:: google.cloud.oracledatabase_v1.services.oracle_database + :members: + :inherited-members: + +.. automodule:: google.cloud.oracledatabase_v1.services.oracle_database.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/services_.rst b/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/services_.rst new file mode 100644 index 000000000000..8b9decce8ef1 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Oracledatabase v1 API +=============================================== +.. 
toctree:: + :maxdepth: 2 + + oracle_database diff --git a/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/types_.rst b/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/types_.rst new file mode 100644 index 000000000000..addba88a94ee --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Oracledatabase v1 API +============================================ + +.. automodule:: google.cloud.oracledatabase_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-oracledatabase/docs/summary_overview.md b/packages/google-cloud-oracledatabase/docs/summary_overview.md new file mode 100644 index 000000000000..326e6e99fa26 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Oracle Database@Google Cloud API + +Overview of the APIs available for Oracle Database@Google Cloud API. + +## All entries + +Classes, methods and properties & attributes for +Oracle Database@Google Cloud API API. 
+ +[classes](https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest/summary_property.html) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/__init__.py new file mode 100644 index 000000000000..29f02c59e323 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/__init__.py @@ -0,0 +1,177 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.oracledatabase import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.oracledatabase_v1.services.oracle_database.client import ( + OracleDatabaseClient, +) +from google.cloud.oracledatabase_v1.types.autonomous_database import ( + AllConnectionStrings, + AutonomousDatabase, + AutonomousDatabaseApex, + AutonomousDatabaseConnectionStrings, + AutonomousDatabaseConnectionUrls, + AutonomousDatabaseProperties, + AutonomousDatabaseStandbySummary, + DatabaseConnectionStringProfile, + DBWorkload, + GenerateType, + OperationsInsightsState, + ScheduledOperationDetails, + State, +) +from google.cloud.oracledatabase_v1.types.autonomous_database_character_set import ( + AutonomousDatabaseCharacterSet, +) +from google.cloud.oracledatabase_v1.types.autonomous_db_backup import ( + AutonomousDatabaseBackup, + AutonomousDatabaseBackupProperties, +) +from google.cloud.oracledatabase_v1.types.autonomous_db_version import ( + AutonomousDbVersion, +) +from google.cloud.oracledatabase_v1.types.common import CustomerContact +from google.cloud.oracledatabase_v1.types.db_node import DbNode, DbNodeProperties +from google.cloud.oracledatabase_v1.types.db_server import DbServer, DbServerProperties +from google.cloud.oracledatabase_v1.types.db_system_shape import DbSystemShape +from google.cloud.oracledatabase_v1.types.entitlement import ( + CloudAccountDetails, + Entitlement, +) +from google.cloud.oracledatabase_v1.types.exadata_infra import ( + CloudExadataInfrastructure, + CloudExadataInfrastructureProperties, + MaintenanceWindow, +) +from google.cloud.oracledatabase_v1.types.gi_version import GiVersion +from google.cloud.oracledatabase_v1.types.location_metadata import LocationMetadata +from google.cloud.oracledatabase_v1.types.oracledatabase import ( + CreateAutonomousDatabaseRequest, + CreateCloudExadataInfrastructureRequest, + CreateCloudVmClusterRequest, + DeleteAutonomousDatabaseRequest, + 
DeleteCloudExadataInfrastructureRequest, + DeleteCloudVmClusterRequest, + GenerateAutonomousDatabaseWalletRequest, + GenerateAutonomousDatabaseWalletResponse, + GetAutonomousDatabaseRequest, + GetCloudExadataInfrastructureRequest, + GetCloudVmClusterRequest, + ListAutonomousDatabaseBackupsRequest, + ListAutonomousDatabaseBackupsResponse, + ListAutonomousDatabaseCharacterSetsRequest, + ListAutonomousDatabaseCharacterSetsResponse, + ListAutonomousDatabasesRequest, + ListAutonomousDatabasesResponse, + ListAutonomousDbVersionsRequest, + ListAutonomousDbVersionsResponse, + ListCloudExadataInfrastructuresRequest, + ListCloudExadataInfrastructuresResponse, + ListCloudVmClustersRequest, + ListCloudVmClustersResponse, + ListDbNodesRequest, + ListDbNodesResponse, + ListDbServersRequest, + ListDbServersResponse, + ListDbSystemShapesRequest, + ListDbSystemShapesResponse, + ListEntitlementsRequest, + ListEntitlementsResponse, + ListGiVersionsRequest, + ListGiVersionsResponse, + OperationMetadata, + RestoreAutonomousDatabaseRequest, +) +from google.cloud.oracledatabase_v1.types.vm_cluster import ( + CloudVmCluster, + CloudVmClusterProperties, + DataCollectionOptions, +) + +__all__ = ( + "OracleDatabaseClient", + "AllConnectionStrings", + "AutonomousDatabase", + "AutonomousDatabaseApex", + "AutonomousDatabaseConnectionStrings", + "AutonomousDatabaseConnectionUrls", + "AutonomousDatabaseProperties", + "AutonomousDatabaseStandbySummary", + "DatabaseConnectionStringProfile", + "ScheduledOperationDetails", + "DBWorkload", + "GenerateType", + "OperationsInsightsState", + "State", + "AutonomousDatabaseCharacterSet", + "AutonomousDatabaseBackup", + "AutonomousDatabaseBackupProperties", + "AutonomousDbVersion", + "CustomerContact", + "DbNode", + "DbNodeProperties", + "DbServer", + "DbServerProperties", + "DbSystemShape", + "CloudAccountDetails", + "Entitlement", + "CloudExadataInfrastructure", + "CloudExadataInfrastructureProperties", + "MaintenanceWindow", + "GiVersion", + 
"LocationMetadata", + "CreateAutonomousDatabaseRequest", + "CreateCloudExadataInfrastructureRequest", + "CreateCloudVmClusterRequest", + "DeleteAutonomousDatabaseRequest", + "DeleteCloudExadataInfrastructureRequest", + "DeleteCloudVmClusterRequest", + "GenerateAutonomousDatabaseWalletRequest", + "GenerateAutonomousDatabaseWalletResponse", + "GetAutonomousDatabaseRequest", + "GetCloudExadataInfrastructureRequest", + "GetCloudVmClusterRequest", + "ListAutonomousDatabaseBackupsRequest", + "ListAutonomousDatabaseBackupsResponse", + "ListAutonomousDatabaseCharacterSetsRequest", + "ListAutonomousDatabaseCharacterSetsResponse", + "ListAutonomousDatabasesRequest", + "ListAutonomousDatabasesResponse", + "ListAutonomousDbVersionsRequest", + "ListAutonomousDbVersionsResponse", + "ListCloudExadataInfrastructuresRequest", + "ListCloudExadataInfrastructuresResponse", + "ListCloudVmClustersRequest", + "ListCloudVmClustersResponse", + "ListDbNodesRequest", + "ListDbNodesResponse", + "ListDbServersRequest", + "ListDbServersResponse", + "ListDbSystemShapesRequest", + "ListDbSystemShapesResponse", + "ListEntitlementsRequest", + "ListEntitlementsResponse", + "ListGiVersionsRequest", + "ListGiVersionsResponse", + "OperationMetadata", + "RestoreAutonomousDatabaseRequest", + "CloudVmCluster", + "CloudVmClusterProperties", + "DataCollectionOptions", +) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/py.typed b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/py.typed new file mode 100644 index 000000000000..cd7e437b6f62 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-oracledatabase package uses inline types. diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/__init__.py new file mode 100644 index 000000000000..225fa0bdbb4c --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/__init__.py @@ -0,0 +1,168 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.oracledatabase_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.oracle_database import OracleDatabaseClient +from .types.autonomous_database import ( + AllConnectionStrings, + AutonomousDatabase, + AutonomousDatabaseApex, + AutonomousDatabaseConnectionStrings, + AutonomousDatabaseConnectionUrls, + AutonomousDatabaseProperties, + AutonomousDatabaseStandbySummary, + DatabaseConnectionStringProfile, + DBWorkload, + GenerateType, + OperationsInsightsState, + ScheduledOperationDetails, + State, +) +from .types.autonomous_database_character_set import AutonomousDatabaseCharacterSet +from .types.autonomous_db_backup import ( + AutonomousDatabaseBackup, + AutonomousDatabaseBackupProperties, +) +from .types.autonomous_db_version import AutonomousDbVersion +from .types.common import CustomerContact +from .types.db_node import DbNode, DbNodeProperties +from .types.db_server import DbServer, DbServerProperties +from .types.db_system_shape import DbSystemShape +from .types.entitlement import CloudAccountDetails, Entitlement +from .types.exadata_infra import ( + CloudExadataInfrastructure, + CloudExadataInfrastructureProperties, + MaintenanceWindow, +) +from .types.gi_version import GiVersion +from .types.location_metadata import LocationMetadata +from .types.oracledatabase import ( + CreateAutonomousDatabaseRequest, + CreateCloudExadataInfrastructureRequest, + CreateCloudVmClusterRequest, + DeleteAutonomousDatabaseRequest, + DeleteCloudExadataInfrastructureRequest, + DeleteCloudVmClusterRequest, + GenerateAutonomousDatabaseWalletRequest, + GenerateAutonomousDatabaseWalletResponse, + GetAutonomousDatabaseRequest, + GetCloudExadataInfrastructureRequest, + GetCloudVmClusterRequest, + ListAutonomousDatabaseBackupsRequest, + ListAutonomousDatabaseBackupsResponse, + ListAutonomousDatabaseCharacterSetsRequest, + ListAutonomousDatabaseCharacterSetsResponse, + ListAutonomousDatabasesRequest, + 
ListAutonomousDatabasesResponse, + ListAutonomousDbVersionsRequest, + ListAutonomousDbVersionsResponse, + ListCloudExadataInfrastructuresRequest, + ListCloudExadataInfrastructuresResponse, + ListCloudVmClustersRequest, + ListCloudVmClustersResponse, + ListDbNodesRequest, + ListDbNodesResponse, + ListDbServersRequest, + ListDbServersResponse, + ListDbSystemShapesRequest, + ListDbSystemShapesResponse, + ListEntitlementsRequest, + ListEntitlementsResponse, + ListGiVersionsRequest, + ListGiVersionsResponse, + OperationMetadata, + RestoreAutonomousDatabaseRequest, +) +from .types.vm_cluster import ( + CloudVmCluster, + CloudVmClusterProperties, + DataCollectionOptions, +) + +__all__ = ( + "AllConnectionStrings", + "AutonomousDatabase", + "AutonomousDatabaseApex", + "AutonomousDatabaseBackup", + "AutonomousDatabaseBackupProperties", + "AutonomousDatabaseCharacterSet", + "AutonomousDatabaseConnectionStrings", + "AutonomousDatabaseConnectionUrls", + "AutonomousDatabaseProperties", + "AutonomousDatabaseStandbySummary", + "AutonomousDbVersion", + "CloudAccountDetails", + "CloudExadataInfrastructure", + "CloudExadataInfrastructureProperties", + "CloudVmCluster", + "CloudVmClusterProperties", + "CreateAutonomousDatabaseRequest", + "CreateCloudExadataInfrastructureRequest", + "CreateCloudVmClusterRequest", + "CustomerContact", + "DBWorkload", + "DataCollectionOptions", + "DatabaseConnectionStringProfile", + "DbNode", + "DbNodeProperties", + "DbServer", + "DbServerProperties", + "DbSystemShape", + "DeleteAutonomousDatabaseRequest", + "DeleteCloudExadataInfrastructureRequest", + "DeleteCloudVmClusterRequest", + "Entitlement", + "GenerateAutonomousDatabaseWalletRequest", + "GenerateAutonomousDatabaseWalletResponse", + "GenerateType", + "GetAutonomousDatabaseRequest", + "GetCloudExadataInfrastructureRequest", + "GetCloudVmClusterRequest", + "GiVersion", + "ListAutonomousDatabaseBackupsRequest", + "ListAutonomousDatabaseBackupsResponse", + 
"ListAutonomousDatabaseCharacterSetsRequest", + "ListAutonomousDatabaseCharacterSetsResponse", + "ListAutonomousDatabasesRequest", + "ListAutonomousDatabasesResponse", + "ListAutonomousDbVersionsRequest", + "ListAutonomousDbVersionsResponse", + "ListCloudExadataInfrastructuresRequest", + "ListCloudExadataInfrastructuresResponse", + "ListCloudVmClustersRequest", + "ListCloudVmClustersResponse", + "ListDbNodesRequest", + "ListDbNodesResponse", + "ListDbServersRequest", + "ListDbServersResponse", + "ListDbSystemShapesRequest", + "ListDbSystemShapesResponse", + "ListEntitlementsRequest", + "ListEntitlementsResponse", + "ListGiVersionsRequest", + "ListGiVersionsResponse", + "LocationMetadata", + "MaintenanceWindow", + "OperationMetadata", + "OperationsInsightsState", + "OracleDatabaseClient", + "RestoreAutonomousDatabaseRequest", + "ScheduledOperationDetails", + "State", +) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_metadata.json b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_metadata.json new file mode 100644 index 000000000000..847abe3bdc22 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_metadata.json @@ -0,0 +1,128 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.oracledatabase_v1", + "protoPackage": "google.cloud.oracledatabase.v1", + "schema": "1.0", + "services": { + "OracleDatabase": { + "clients": { + "rest": { + "libraryClient": "OracleDatabaseClient", + "rpcs": { + "CreateAutonomousDatabase": { + "methods": [ + "create_autonomous_database" + ] + }, + "CreateCloudExadataInfrastructure": { + "methods": [ + "create_cloud_exadata_infrastructure" + ] + }, + "CreateCloudVmCluster": { + "methods": [ + "create_cloud_vm_cluster" + ] + }, + "DeleteAutonomousDatabase": { + "methods": [ + "delete_autonomous_database" + ] + }, + 
"DeleteCloudExadataInfrastructure": { + "methods": [ + "delete_cloud_exadata_infrastructure" + ] + }, + "DeleteCloudVmCluster": { + "methods": [ + "delete_cloud_vm_cluster" + ] + }, + "GenerateAutonomousDatabaseWallet": { + "methods": [ + "generate_autonomous_database_wallet" + ] + }, + "GetAutonomousDatabase": { + "methods": [ + "get_autonomous_database" + ] + }, + "GetCloudExadataInfrastructure": { + "methods": [ + "get_cloud_exadata_infrastructure" + ] + }, + "GetCloudVmCluster": { + "methods": [ + "get_cloud_vm_cluster" + ] + }, + "ListAutonomousDatabaseBackups": { + "methods": [ + "list_autonomous_database_backups" + ] + }, + "ListAutonomousDatabaseCharacterSets": { + "methods": [ + "list_autonomous_database_character_sets" + ] + }, + "ListAutonomousDatabases": { + "methods": [ + "list_autonomous_databases" + ] + }, + "ListAutonomousDbVersions": { + "methods": [ + "list_autonomous_db_versions" + ] + }, + "ListCloudExadataInfrastructures": { + "methods": [ + "list_cloud_exadata_infrastructures" + ] + }, + "ListCloudVmClusters": { + "methods": [ + "list_cloud_vm_clusters" + ] + }, + "ListDbNodes": { + "methods": [ + "list_db_nodes" + ] + }, + "ListDbServers": { + "methods": [ + "list_db_servers" + ] + }, + "ListDbSystemShapes": { + "methods": [ + "list_db_system_shapes" + ] + }, + "ListEntitlements": { + "methods": [ + "list_entitlements" + ] + }, + "ListGiVersions": { + "methods": [ + "list_gi_versions" + ] + }, + "RestoreAutonomousDatabase": { + "methods": [ + "restore_autonomous_database" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/py.typed b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/py.typed new file mode 100644 index 000000000000..cd7e437b6f62 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-oracledatabase package uses inline types. diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py new file mode 100644 index 000000000000..947b9516b5e7 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import OracleDatabaseClient + +__all__ = ("OracleDatabaseClient",) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/client.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/client.py new file mode 100644 index 000000000000..9a4182820e59 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/client.py @@ -0,0 +1,4073 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.oracledatabase_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.oracledatabase_v1.services.oracle_database import pagers +from google.cloud.oracledatabase_v1.types import ( + autonomous_database_character_set, + 
autonomous_db_backup, + autonomous_db_version, + db_node, + db_server, + db_system_shape, + entitlement, + exadata_infra, + gi_version, + oracledatabase, + vm_cluster, +) +from google.cloud.oracledatabase_v1.types import ( + autonomous_database as gco_autonomous_database, +) +from google.cloud.oracledatabase_v1.types import autonomous_database + +from .transports.base import DEFAULT_CLIENT_INFO, OracleDatabaseTransport +from .transports.rest import OracleDatabaseRestTransport + + +class OracleDatabaseClientMeta(type): + """Metaclass for the OracleDatabase client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[OracleDatabaseTransport]] + _transport_registry["rest"] = OracleDatabaseRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[OracleDatabaseTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class OracleDatabaseClient(metaclass=OracleDatabaseClientMeta): + """Service describing handlers for resources""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "oracledatabase.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "oracledatabase.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            OracleDatabaseClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            OracleDatabaseClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> OracleDatabaseTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            OracleDatabaseTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def autonomous_database_path(
+        project: str,
+        location: str,
+        autonomous_database: str,
+    ) -> str:
+        """Returns a fully-qualified autonomous_database string."""
+        return "projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}".format(
+            project=project,
+            location=location,
+            autonomous_database=autonomous_database,
+        )
+
+    @staticmethod
+    def parse_autonomous_database_path(path: str) -> Dict[str, str]:
+        """Parses a autonomous_database path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/autonomousDatabases/(?P<autonomous_database>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def autonomous_database_backup_path(
+        project: str,
+        location: str,
+        autonomous_database_backup: str,
+    ) -> str:
+        """Returns a fully-qualified autonomous_database_backup string."""
+        return "projects/{project}/locations/{location}/autonomousDatabaseBackups/{autonomous_database_backup}".format(
+            project=project,
+            location=location,
+            autonomous_database_backup=autonomous_database_backup,
+        )
+
+    @staticmethod
+    def parse_autonomous_database_backup_path(path: str) -> Dict[str, str]:
+        """Parses a autonomous_database_backup path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/autonomousDatabaseBackups/(?P<autonomous_database_backup>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def autonomous_database_character_set_path(
+        project: str,
+        location: str,
+        autonomous_database_character_set: str,
+    ) -> str:
+        """Returns a fully-qualified autonomous_database_character_set string."""
+        return "projects/{project}/locations/{location}/autonomousDatabaseCharacterSets/{autonomous_database_character_set}".format(
+            project=project,
+            location=location,
+            autonomous_database_character_set=autonomous_database_character_set,
+        )
+
+    @staticmethod
+    def parse_autonomous_database_character_set_path(path: str) -> Dict[str, str]:
+        """Parses a autonomous_database_character_set path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/autonomousDatabaseCharacterSets/(?P<autonomous_database_character_set>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def autonomous_db_version_path(
+        project: str,
+        location: str,
+        autonomous_db_version: str,
+    ) -> str:
+        """Returns a fully-qualified autonomous_db_version string."""
+        return "projects/{project}/locations/{location}/autonomousDbVersions/{autonomous_db_version}".format(
+            project=project,
+            location=location,
+            autonomous_db_version=autonomous_db_version,
+        )
+
+    @staticmethod
+    def parse_autonomous_db_version_path(path: str) -> Dict[str, str]:
+        """Parses a autonomous_db_version path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/autonomousDbVersions/(?P<autonomous_db_version>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def cloud_exadata_infrastructure_path(
+        project: str,
+        location: str,
+        cloud_exadata_infrastructure: str,
+    ) -> str:
+        """Returns a fully-qualified cloud_exadata_infrastructure string."""
+        return "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}".format(
+            project=project,
+            location=location,
+            cloud_exadata_infrastructure=cloud_exadata_infrastructure,
+        )
+
+    @staticmethod
+    def parse_cloud_exadata_infrastructure_path(path: str) -> Dict[str, str]:
+        """Parses a cloud_exadata_infrastructure path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/cloudExadataInfrastructures/(?P<cloud_exadata_infrastructure>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def cloud_vm_cluster_path(
+        project: str,
+        location: str,
+        cloud_vm_cluster: str,
+    ) -> str:
+        """Returns a fully-qualified cloud_vm_cluster string."""
+        return "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}".format(
+            project=project,
+            location=location,
+            cloud_vm_cluster=cloud_vm_cluster,
+        )
+
+    @staticmethod
+    def parse_cloud_vm_cluster_path(path: str) -> Dict[str, str]:
+        """Parses a cloud_vm_cluster path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/cloudVmClusters/(?P<cloud_vm_cluster>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def db_node_path(
+        project: str,
+        location: str,
+        cloud_vm_cluster: str,
+        db_node: str,
+    ) -> str:
+        """Returns a fully-qualified db_node string."""
+        return "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}/dbNodes/{db_node}".format(
+            project=project,
+            location=location,
+            cloud_vm_cluster=cloud_vm_cluster,
+            db_node=db_node,
+        )
+
+    @staticmethod
+    def parse_db_node_path(path: str) -> Dict[str, str]:
+        """Parses a db_node path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/cloudVmClusters/(?P<cloud_vm_cluster>.+?)/dbNodes/(?P<db_node>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def db_server_path(
+        project: str,
+        location: str,
+        cloud_exadata_infrastructure: str,
+        db_server: str,
+    ) -> str:
+        """Returns a fully-qualified db_server string."""
+        return "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}/dbServers/{db_server}".format(
+            project=project,
+            location=location,
+            cloud_exadata_infrastructure=cloud_exadata_infrastructure,
+            db_server=db_server,
+        )
+
+    @staticmethod
+    def parse_db_server_path(path: str) -> Dict[str, str]:
+        """Parses a db_server path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/cloudExadataInfrastructures/(?P<cloud_exadata_infrastructure>.+?)/dbServers/(?P<db_server>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def db_system_shape_path(
+        project: str,
+        location: str,
+        db_system_shape: str,
+    ) -> str:
+        """Returns a fully-qualified db_system_shape string."""
+        return "projects/{project}/locations/{location}/dbSystemShapes/{db_system_shape}".format(
+            project=project,
+            location=location,
+            db_system_shape=db_system_shape,
+        )
+
+    @staticmethod
+    def parse_db_system_shape_path(path: str) -> Dict[str, str]:
+        """Parses a db_system_shape path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dbSystemShapes/(?P<db_system_shape>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def entitlement_path(
+        project: str,
+        location: str,
+        entitlement: str,
+    ) -> str:
+        """Returns a fully-qualified entitlement string."""
+        return (
+            "projects/{project}/locations/{location}/entitlements/{entitlement}".format(
+                project=project,
+                location=location,
+                entitlement=entitlement,
+            )
+        )
+
+    @staticmethod
+    def parse_entitlement_path(path: str) -> Dict[str, str]:
+        """Parses a entitlement path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/entitlements/(?P<entitlement>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def gi_version_path(
+        project: str,
+        location: str,
+        gi_version: str,
+    ) -> str:
+        """Returns a fully-qualified gi_version string."""
+        return "projects/{project}/locations/{location}/giVersions/{gi_version}".format(
+            project=project,
+            location=location,
+            gi_version=gi_version,
+        )
+
+    @staticmethod
+    def parse_gi_version_path(path: str) -> Dict[str, str]:
+        """Parses a gi_version path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/giVersions/(?P<gi_version>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def network_path(
+        project: str,
+        network: str,
+    ) -> str:
+        """Returns a fully-qualified network string."""
+        return "projects/{project}/global/networks/{network}".format(
+            project=project,
+            network=network,
+        )
+
+    @staticmethod
+    def parse_network_path(path: str) -> Dict[str, str]:
+        """Parses a network path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/global/networks/(?P<network>.+?)$", path
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` if provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. 
+ google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. 
+ use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = OracleDatabaseClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = OracleDatabaseClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. 
+ + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = OracleDatabaseClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or OracleDatabaseClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. 
+ """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, OracleDatabaseTransport, Callable[..., OracleDatabaseTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the oracle database client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,OracleDatabaseTransport,Callable[..., OracleDatabaseTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the OracleDatabaseTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = OracleDatabaseClient._read_environment_variables() + self._client_cert_source = OracleDatabaseClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = OracleDatabaseClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. 
+ self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, OracleDatabaseTransport) + if transport_provided: + # transport is a OracleDatabaseTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(OracleDatabaseTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or OracleDatabaseClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[OracleDatabaseTransport], Callable[..., OracleDatabaseTransport] + ] = ( + OracleDatabaseClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., OracleDatabaseTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + 
client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def list_cloud_exadata_infrastructures( + self, + request: Optional[ + Union[oracledatabase.ListCloudExadataInfrastructuresRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCloudExadataInfrastructuresPager: + r"""Lists Exadata Infrastructures in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_cloud_exadata_infrastructures(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListCloudExadataInfrastructuresRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_cloud_exadata_infrastructures(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListCloudExadataInfrastructuresRequest, dict]): + The request object. The request for ``CloudExadataInfrastructures.List``. + parent (str): + Required. The parent value for + CloudExadataInfrastructure in the + following format: + projects/{project}/locations/{location}. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListCloudExadataInfrastructuresPager: + The response for CloudExadataInfrastructures.list. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, oracledatabase.ListCloudExadataInfrastructuresRequest + ): + request = oracledatabase.ListCloudExadataInfrastructuresRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_cloud_exadata_infrastructures + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListCloudExadataInfrastructuresPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_cloud_exadata_infrastructure( + self, + request: Optional[ + Union[oracledatabase.GetCloudExadataInfrastructureRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> exadata_infra.CloudExadataInfrastructure: + r"""Gets details of a single Exadata Infrastructure. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_get_cloud_exadata_infrastructure(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GetCloudExadataInfrastructureRequest( + name="name_value", + ) + + # Make the request + response = client.get_cloud_exadata_infrastructure(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.GetCloudExadataInfrastructureRequest, dict]): + The request object. The request for ``CloudExadataInfrastructure.Get``. + name (str): + Required. 
The name of the Cloud Exadata Infrastructure + in the following format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure: + Represents CloudExadataInfrastructure + resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudExadataInfrastructure/ + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.GetCloudExadataInfrastructureRequest): + request = oracledatabase.GetCloudExadataInfrastructureRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_cloud_exadata_infrastructure + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_cloud_exadata_infrastructure( + self, + request: Optional[ + Union[oracledatabase.CreateCloudExadataInfrastructureRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + cloud_exadata_infrastructure: Optional[ + exadata_infra.CloudExadataInfrastructure + ] = None, + cloud_exadata_infrastructure_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Exadata Infrastructure in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_create_cloud_exadata_infrastructure(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.CreateCloudExadataInfrastructureRequest( + parent="parent_value", + cloud_exadata_infrastructure_id="cloud_exadata_infrastructure_id_value", + ) + + # Make the request + operation = client.create_cloud_exadata_infrastructure(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.CreateCloudExadataInfrastructureRequest, dict]): + The request object. The request for ``CloudExadataInfrastructure.Create``. + parent (str): + Required. The parent value for + CloudExadataInfrastructure in the + following format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cloud_exadata_infrastructure (google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure): + Required. Details of the Exadata + Infrastructure instance to create. + + This corresponds to the ``cloud_exadata_infrastructure`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cloud_exadata_infrastructure_id (str): + Required. The ID of the Exadata Infrastructure to + create. This value is restricted to + (^`a-z <[a-z0-9-]{0,61}[a-z0-9]>`__?$) and must be a + maximum of 63 characters in length. The value must start + with a letter and end with a letter or a number. 
+ + This corresponds to the ``cloud_exadata_infrastructure_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure` Represents CloudExadataInfrastructure resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudExadataInfrastructure/ + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, cloud_exadata_infrastructure, cloud_exadata_infrastructure_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, oracledatabase.CreateCloudExadataInfrastructureRequest + ): + request = oracledatabase.CreateCloudExadataInfrastructureRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if cloud_exadata_infrastructure is not None: + request.cloud_exadata_infrastructure = cloud_exadata_infrastructure + if cloud_exadata_infrastructure_id is not None: + request.cloud_exadata_infrastructure_id = ( + cloud_exadata_infrastructure_id + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_cloud_exadata_infrastructure + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + exadata_infra.CloudExadataInfrastructure, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_cloud_exadata_infrastructure( + self, + request: Optional[ + Union[oracledatabase.DeleteCloudExadataInfrastructureRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Exadata Infrastructure. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_delete_cloud_exadata_infrastructure(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.DeleteCloudExadataInfrastructureRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cloud_exadata_infrastructure(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.DeleteCloudExadataInfrastructureRequest, dict]): + The request object. The request for ``CloudExadataInfrastructure.Delete``. + name (str): + Required. The name of the Cloud Exadata Infrastructure + in the following format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, oracledatabase.DeleteCloudExadataInfrastructureRequest + ): + request = oracledatabase.DeleteCloudExadataInfrastructureRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_cloud_exadata_infrastructure + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def list_cloud_vm_clusters( + self, + request: Optional[ + Union[oracledatabase.ListCloudVmClustersRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCloudVmClustersPager: + r"""Lists the VM Clusters in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_cloud_vm_clusters(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListCloudVmClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_cloud_vm_clusters(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListCloudVmClustersRequest, dict]): + The request object. The request for ``CloudVmCluster.List``. + parent (str): + Required. The name of the parent in + the following format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListCloudVmClustersPager: + The response for CloudVmCluster.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListCloudVmClustersRequest): + request = oracledatabase.ListCloudVmClustersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_cloud_vm_clusters] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListCloudVmClustersPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_cloud_vm_cluster( + self, + request: Optional[Union[oracledatabase.GetCloudVmClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vm_cluster.CloudVmCluster: + r"""Gets details of a single VM Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_get_cloud_vm_cluster(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GetCloudVmClusterRequest( + name="name_value", + ) + + # Make the request + response = client.get_cloud_vm_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.GetCloudVmClusterRequest, dict]): + The request object. The request for ``CloudVmCluster.Get``. + name (str): + Required. The name of the Cloud VM Cluster in the + following format: + projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.oracledatabase_v1.types.CloudVmCluster: + Details of the Cloud VM Cluster + resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudVmCluster/ + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.GetCloudVmClusterRequest): + request = oracledatabase.GetCloudVmClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_cloud_vm_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_cloud_vm_cluster( + self, + request: Optional[ + Union[oracledatabase.CreateCloudVmClusterRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + cloud_vm_cluster: Optional[vm_cluster.CloudVmCluster] = None, + cloud_vm_cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new VM Cluster in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_create_cloud_vm_cluster(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + cloud_vm_cluster = oracledatabase_v1.CloudVmCluster() + cloud_vm_cluster.exadata_infrastructure = "exadata_infrastructure_value" + cloud_vm_cluster.cidr = "cidr_value" + cloud_vm_cluster.backup_subnet_cidr = "backup_subnet_cidr_value" + cloud_vm_cluster.network = "network_value" + + request = oracledatabase_v1.CreateCloudVmClusterRequest( + parent="parent_value", + cloud_vm_cluster_id="cloud_vm_cluster_id_value", + cloud_vm_cluster=cloud_vm_cluster, + ) + + # Make the request + operation = client.create_cloud_vm_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.CreateCloudVmClusterRequest, dict]): + The request object. 
The request for ``CloudVmCluster.Create``. + parent (str): + Required. The name of the parent in + the following format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cloud_vm_cluster (google.cloud.oracledatabase_v1.types.CloudVmCluster): + Required. The resource being created + This corresponds to the ``cloud_vm_cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cloud_vm_cluster_id (str): + Required. The ID of the VM Cluster to create. This value + is restricted to (^`a-z <[a-z0-9-]{0,61}[a-z0-9]>`__?$) + and must be a maximum of 63 characters in length. The + value must start with a letter and end with a letter or + a number. + + This corresponds to the ``cloud_vm_cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.oracledatabase_v1.types.CloudVmCluster` Details of the Cloud VM Cluster resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudVmCluster/ + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, cloud_vm_cluster, cloud_vm_cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.CreateCloudVmClusterRequest): + request = oracledatabase.CreateCloudVmClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if cloud_vm_cluster is not None: + request.cloud_vm_cluster = cloud_vm_cluster + if cloud_vm_cluster_id is not None: + request.cloud_vm_cluster_id = cloud_vm_cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_cloud_vm_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + vm_cluster.CloudVmCluster, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_cloud_vm_cluster( + self, + request: Optional[ + Union[oracledatabase.DeleteCloudVmClusterRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single VM Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_delete_cloud_vm_cluster(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.DeleteCloudVmClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cloud_vm_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.DeleteCloudVmClusterRequest, dict]): + The request object. The request for ``CloudVmCluster.Delete``. + name (str): + Required. The name of the Cloud VM Cluster in the + following format: + projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.DeleteCloudVmClusterRequest): + request = oracledatabase.DeleteCloudVmClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_cloud_vm_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def list_entitlements( + self, + request: Optional[Union[oracledatabase.ListEntitlementsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntitlementsPager: + r"""Lists the entitlements in a given project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_entitlements(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListEntitlementsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entitlements(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListEntitlementsRequest, dict]): + The request object. The request for ``Entitlement.List``. + parent (str): + Required. The parent value for the + entitlement in the following format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListEntitlementsPager: + The response for Entitlement.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListEntitlementsRequest): + request = oracledatabase.ListEntitlementsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_entitlements] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEntitlementsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_db_servers( + self, + request: Optional[Union[oracledatabase.ListDbServersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDbServersPager: + r"""Lists the database servers of an Exadata + Infrastructure instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_db_servers(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListDbServersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_db_servers(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListDbServersRequest, dict]): + The request object. The request for ``DbServer.List``. + parent (str): + Required. The parent value for + database server in the following format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloudExadataInfrastructure}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListDbServersPager: + The response for DbServer.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListDbServersRequest): + request = oracledatabase.ListDbServersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_db_servers] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDbServersPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_db_nodes( + self, + request: Optional[Union[oracledatabase.ListDbNodesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDbNodesPager: + r"""Lists the database nodes of a VM Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_db_nodes(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListDbNodesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_db_nodes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListDbNodesRequest, dict]): + The request object. The request for ``DbNode.List``. + parent (str): + Required. The parent value for + database node in the following format: + projects/{project}/locations/{location}/cloudVmClusters/{cloudVmCluster}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListDbNodesPager: + The response for DbNode.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListDbNodesRequest): + request = oracledatabase.ListDbNodesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_db_nodes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDbNodesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_gi_versions( + self, + request: Optional[Union[oracledatabase.ListGiVersionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListGiVersionsPager: + r"""Lists all the valid Oracle Grid Infrastructure (GI) + versions for the given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_gi_versions(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListGiVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_gi_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListGiVersionsRequest, dict]): + The request object. The request for ``GiVersion.List``. + parent (str): + Required. The parent value for Grid + Infrastructure Version in the following + format: Format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListGiVersionsPager: + The response for GiVersion.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListGiVersionsRequest): + request = oracledatabase.ListGiVersionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_gi_versions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListGiVersionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_db_system_shapes( + self, + request: Optional[Union[oracledatabase.ListDbSystemShapesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDbSystemShapesPager: + r"""Lists the database system shapes available for the + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_db_system_shapes(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListDbSystemShapesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_db_system_shapes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListDbSystemShapesRequest, dict]): + The request object. The request for ``DbSystemShape.List``. + parent (str): + Required. The parent value for + Database System Shapes in the following + format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListDbSystemShapesPager: + The response for DbSystemShape.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListDbSystemShapesRequest): + request = oracledatabase.ListDbSystemShapesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_db_system_shapes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListDbSystemShapesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_autonomous_databases( + self, + request: Optional[ + Union[oracledatabase.ListAutonomousDatabasesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDatabasesPager: + r"""Lists the Autonomous Databases in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_autonomous_databases(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_databases(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListAutonomousDatabasesRequest, dict]): + The request object. The request for ``AutonomousDatabase.List``. + parent (str): + Required. The parent value for the + Autonomous Database in the following + format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDatabasesPager: + The response for AutonomousDatabase.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListAutonomousDatabasesRequest): + request = oracledatabase.ListAutonomousDatabasesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_autonomous_databases + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAutonomousDatabasesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_autonomous_database( + self, + request: Optional[ + Union[oracledatabase.GetAutonomousDatabaseRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autonomous_database.AutonomousDatabase: + r"""Gets the details of a single Autonomous Database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_get_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GetAutonomousDatabaseRequest( + name="name_value", + ) + + # Make the request + response = client.get_autonomous_database(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.GetAutonomousDatabaseRequest, dict]): + The request object. The request for ``AutonomousDatabase.Get``. + name (str): + Required. 
The name of the Autonomous Database in the + following format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.types.AutonomousDatabase: + Details of the Autonomous Database + resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabase/ + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.GetAutonomousDatabaseRequest): + request = oracledatabase.GetAutonomousDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_autonomous_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_autonomous_database( + self, + request: Optional[ + Union[oracledatabase.CreateAutonomousDatabaseRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + autonomous_database: Optional[ + gco_autonomous_database.AutonomousDatabase + ] = None, + autonomous_database_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Autonomous Database in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_create_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + autonomous_database = oracledatabase_v1.AutonomousDatabase() + autonomous_database.network = "network_value" + autonomous_database.cidr = "cidr_value" + + request = oracledatabase_v1.CreateAutonomousDatabaseRequest( + parent="parent_value", + autonomous_database_id="autonomous_database_id_value", + autonomous_database=autonomous_database, + ) + + # Make the request + operation = client.create_autonomous_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.CreateAutonomousDatabaseRequest, dict]): + The request object. The request for ``AutonomousDatabase.Create``. + parent (str): + Required. The name of the parent in + the following format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + autonomous_database (google.cloud.oracledatabase_v1.types.AutonomousDatabase): + Required. The Autonomous Database + being created. + + This corresponds to the ``autonomous_database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + autonomous_database_id (str): + Required. The ID of the Autonomous Database to create. + This value is restricted to + (^`a-z <[a-z0-9-]{0,61}[a-z0-9]>`__?$) and must be a + maximum of 63 characters in length. The value must start + with a letter and end with a letter or a number. 
+ + This corresponds to the ``autonomous_database_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.oracledatabase_v1.types.AutonomousDatabase` Details of the Autonomous Database resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabase/ + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, autonomous_database, autonomous_database_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.CreateAutonomousDatabaseRequest): + request = oracledatabase.CreateAutonomousDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if autonomous_database is not None: + request.autonomous_database = autonomous_database + if autonomous_database_id is not None: + request.autonomous_database_id = autonomous_database_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.create_autonomous_database + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gco_autonomous_database.AutonomousDatabase, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_autonomous_database( + self, + request: Optional[ + Union[oracledatabase.DeleteAutonomousDatabaseRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Autonomous Database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_delete_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.DeleteAutonomousDatabaseRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_autonomous_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.DeleteAutonomousDatabaseRequest, dict]): + The request object. The request for ``AutonomousDatabase.Delete``. + name (str): + Required. The name of the resource in the following + format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.DeleteAutonomousDatabaseRequest): + request = oracledatabase.DeleteAutonomousDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_autonomous_database + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def restore_autonomous_database( + self, + request: Optional[ + Union[oracledatabase.RestoreAutonomousDatabaseRequest, dict] + ] = None, + *, + name: Optional[str] = None, + restore_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Restores a single Autonomous Database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_restore_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.RestoreAutonomousDatabaseRequest( + name="name_value", + ) + + # Make the request + operation = client.restore_autonomous_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.RestoreAutonomousDatabaseRequest, dict]): + The request object. The request for ``AutonomousDatabase.Restore``. + name (str): + Required. The name of the Autonomous Database in the + following format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + restore_time (google.protobuf.timestamp_pb2.Timestamp): + Required. 
The time and date to + restore the database to. + + This corresponds to the ``restore_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.oracledatabase_v1.types.AutonomousDatabase` Details of the Autonomous Database resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabase/ + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, restore_time]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.RestoreAutonomousDatabaseRequest): + request = oracledatabase.RestoreAutonomousDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if restore_time is not None: + request.restore_time = restore_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.restore_autonomous_database + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + autonomous_database.AutonomousDatabase, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. + return response + + def generate_autonomous_database_wallet( + self, + request: Optional[ + Union[oracledatabase.GenerateAutonomousDatabaseWalletRequest, dict] + ] = None, + *, + name: Optional[str] = None, + type_: Optional[autonomous_database.GenerateType] = None, + is_regional: Optional[bool] = None, + password: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: + r"""Generates a wallet for an Autonomous Database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_generate_autonomous_database_wallet(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GenerateAutonomousDatabaseWalletRequest( + name="name_value", + password="password_value", + ) + + # Make the request + response = client.generate_autonomous_database_wallet(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.GenerateAutonomousDatabaseWalletRequest, dict]): + The request object. The request for ``AutonomousDatabase.GenerateWallet``. + name (str): + Required. The name of the Autonomous Database in the + following format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + type_ (google.cloud.oracledatabase_v1.types.GenerateType): + Optional. The type of wallet + generation for the Autonomous Database. + The default value is SINGLE. + + This corresponds to the ``type_`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + is_regional (bool): + Optional. True when requesting + regional connection strings in PDB + connect info, applicable to cross-region + Data Guard only. + + This corresponds to the ``is_regional`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + password (str): + Required. The password used to + encrypt the keys inside the wallet. The + password must be a minimum of 8 + characters. + + This corresponds to the ``password`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.types.GenerateAutonomousDatabaseWalletResponse: + The response for AutonomousDatabase.GenerateWallet. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, type_, is_regional, password]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, oracledatabase.GenerateAutonomousDatabaseWalletRequest + ): + request = oracledatabase.GenerateAutonomousDatabaseWalletRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if type_ is not None: + request.type_ = type_ + if is_regional is not None: + request.is_regional = is_regional + if password is not None: + request.password = password + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.generate_autonomous_database_wallet + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_autonomous_db_versions( + self, + request: Optional[ + Union[oracledatabase.ListAutonomousDbVersionsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDbVersionsPager: + r"""Lists all the available Autonomous Database versions + for a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_autonomous_db_versions(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDbVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_db_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListAutonomousDbVersionsRequest, dict]): + The request object. The request for ``AutonomousDbVersion.List``. + parent (str): + Required. The parent value for the + Autonomous Database in the following + format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDbVersionsPager: + The response for AutonomousDbVersion.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListAutonomousDbVersionsRequest): + request = oracledatabase.ListAutonomousDbVersionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_autonomous_db_versions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAutonomousDbVersionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_autonomous_database_character_sets( + self, + request: Optional[ + Union[oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDatabaseCharacterSetsPager: + r"""Lists Autonomous Database Character Sets in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_autonomous_database_character_sets(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDatabaseCharacterSetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_database_character_sets(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseCharacterSetsRequest, dict]): + The request object. 
The request for ``AutonomousDatabaseCharacterSet.List``. + parent (str): + Required. The parent value for the + Autonomous Database in the following + format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDatabaseCharacterSetsPager: + The response for AutonomousDatabaseCharacterSet.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, oracledatabase.ListAutonomousDatabaseCharacterSetsRequest + ): + request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_autonomous_database_character_sets + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAutonomousDatabaseCharacterSetsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_autonomous_database_backups( + self, + request: Optional[ + Union[oracledatabase.ListAutonomousDatabaseBackupsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDatabaseBackupsPager: + r"""Lists the long-term and automatic backups of an + Autonomous Database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_autonomous_database_backups(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDatabaseBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_database_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseBackupsRequest, dict]): + The request object. The request for ``AutonomousDatabaseBackup.List``. + parent (str): + Required. The parent value for + ListAutonomousDatabaseBackups in the + following format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDatabaseBackupsPager: + The response for AutonomousDatabaseBackup.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListAutonomousDatabaseBackupsRequest): + request = oracledatabase.ListAutonomousDatabaseBackupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_autonomous_database_backups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAutonomousDatabaseBackupsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "OracleDatabaseClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("OracleDatabaseClient",) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py new file mode 100644 index 000000000000..111ec9a9d392 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py @@ -0,0 +1,876 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.oracledatabase_v1.types import ( + autonomous_database, + autonomous_database_character_set, + autonomous_db_backup, + autonomous_db_version, + db_node, + db_server, + db_system_shape, + entitlement, + exadata_infra, + gi_version, + oracledatabase, + vm_cluster, +) + + +class ListCloudExadataInfrastructuresPager: + """A pager for iterating through ``list_cloud_exadata_infrastructures`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListCloudExadataInfrastructuresResponse` object, and + provides an ``__iter__`` method to iterate through its + ``cloud_exadata_infrastructures`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListCloudExadataInfrastructures`` requests and continue to iterate + through the ``cloud_exadata_infrastructures`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListCloudExadataInfrastructuresResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListCloudExadataInfrastructuresResponse], + request: oracledatabase.ListCloudExadataInfrastructuresRequest, + response: oracledatabase.ListCloudExadataInfrastructuresResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListCloudExadataInfrastructuresRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListCloudExadataInfrastructuresResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListCloudExadataInfrastructuresRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListCloudExadataInfrastructuresResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[exadata_infra.CloudExadataInfrastructure]: + for page in self.pages: + yield from page.cloud_exadata_infrastructures + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListCloudVmClustersPager: + """A pager for iterating through ``list_cloud_vm_clusters`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListCloudVmClustersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``cloud_vm_clusters`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListCloudVmClusters`` requests and continue to iterate + through the ``cloud_vm_clusters`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListCloudVmClustersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListCloudVmClustersResponse], + request: oracledatabase.ListCloudVmClustersRequest, + response: oracledatabase.ListCloudVmClustersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListCloudVmClustersRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListCloudVmClustersResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = oracledatabase.ListCloudVmClustersRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListCloudVmClustersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[vm_cluster.CloudVmCluster]: + for page in self.pages: + yield from page.cloud_vm_clusters + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListEntitlementsPager: + """A pager for iterating through ``list_entitlements`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListEntitlementsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``entitlements`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEntitlements`` requests and continue to iterate + through the ``entitlements`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListEntitlementsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListEntitlementsResponse], + request: oracledatabase.ListEntitlementsRequest, + response: oracledatabase.ListEntitlementsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListEntitlementsRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListEntitlementsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListEntitlementsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListEntitlementsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[entitlement.Entitlement]: + for page in self.pages: + yield from page.entitlements + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDbServersPager: + """A pager for iterating through ``list_db_servers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListDbServersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``db_servers`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDbServers`` requests and continue to iterate + through the ``db_servers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListDbServersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListDbServersResponse], + request: oracledatabase.ListDbServersRequest, + response: oracledatabase.ListDbServersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListDbServersRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListDbServersResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = oracledatabase.ListDbServersRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListDbServersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[db_server.DbServer]: + for page in self.pages: + yield from page.db_servers + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDbNodesPager: + """A pager for iterating through ``list_db_nodes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListDbNodesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``db_nodes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDbNodes`` requests and continue to iterate + through the ``db_nodes`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.oracledatabase_v1.types.ListDbNodesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListDbNodesResponse], + request: oracledatabase.ListDbNodesRequest, + response: oracledatabase.ListDbNodesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListDbNodesRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListDbNodesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListDbNodesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListDbNodesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[db_node.DbNode]: + for page in self.pages: + yield from page.db_nodes + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListGiVersionsPager: + """A pager for iterating through ``list_gi_versions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListGiVersionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``gi_versions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListGiVersions`` requests and continue to iterate + through the ``gi_versions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListGiVersionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListGiVersionsResponse], + request: oracledatabase.ListGiVersionsRequest, + response: oracledatabase.ListGiVersionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListGiVersionsRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListGiVersionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = oracledatabase.ListGiVersionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListGiVersionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[gi_version.GiVersion]: + for page in self.pages: + yield from page.gi_versions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDbSystemShapesPager: + """A pager for iterating through ``list_db_system_shapes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListDbSystemShapesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``db_system_shapes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDbSystemShapes`` requests and continue to iterate + through the ``db_system_shapes`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.oracledatabase_v1.types.ListDbSystemShapesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListDbSystemShapesResponse], + request: oracledatabase.ListDbSystemShapesRequest, + response: oracledatabase.ListDbSystemShapesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListDbSystemShapesRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListDbSystemShapesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListDbSystemShapesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListDbSystemShapesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[db_system_shape.DbSystemShape]: + for page in self.pages: + yield from page.db_system_shapes + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAutonomousDatabasesPager: + """A pager for iterating through ``list_autonomous_databases`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDatabasesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``autonomous_databases`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAutonomousDatabases`` requests and continue to iterate + through the ``autonomous_databases`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDatabasesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListAutonomousDatabasesResponse], + request: oracledatabase.ListAutonomousDatabasesRequest, + response: oracledatabase.ListAutonomousDatabasesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListAutonomousDatabasesRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListAutonomousDatabasesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = oracledatabase.ListAutonomousDatabasesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListAutonomousDatabasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[autonomous_database.AutonomousDatabase]: + for page in self.pages: + yield from page.autonomous_databases + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAutonomousDbVersionsPager: + """A pager for iterating through 
``list_autonomous_db_versions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDbVersionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``autonomous_db_versions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAutonomousDbVersions`` requests and continue to iterate + through the ``autonomous_db_versions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDbVersionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListAutonomousDbVersionsResponse], + request: oracledatabase.ListAutonomousDbVersionsRequest, + response: oracledatabase.ListAutonomousDbVersionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListAutonomousDbVersionsRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListAutonomousDbVersionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListAutonomousDbVersionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListAutonomousDbVersionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[autonomous_db_version.AutonomousDbVersion]: + for page in self.pages: + yield from page.autonomous_db_versions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAutonomousDatabaseCharacterSetsPager: + """A pager for iterating through ``list_autonomous_database_character_sets`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseCharacterSetsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``autonomous_database_character_sets`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAutonomousDatabaseCharacterSets`` requests and continue to iterate + through the ``autonomous_database_character_sets`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseCharacterSetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., oracledatabase.ListAutonomousDatabaseCharacterSetsResponse + ], + request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + response: oracledatabase.ListAutonomousDatabaseCharacterSetsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseCharacterSetsRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseCharacterSetsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[oracledatabase.ListAutonomousDatabaseCharacterSetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__( + self, + ) -> Iterator[autonomous_database_character_set.AutonomousDatabaseCharacterSet]: + for page in self.pages: + yield from page.autonomous_database_character_sets + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAutonomousDatabaseBackupsPager: + """A pager for iterating through ``list_autonomous_database_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseBackupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``autonomous_database_backups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAutonomousDatabaseBackups`` requests and continue to iterate + through the ``autonomous_database_backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListAutonomousDatabaseBackupsResponse], + request: oracledatabase.ListAutonomousDatabaseBackupsRequest, + response: oracledatabase.ListAutonomousDatabaseBackupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseBackupsRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseBackupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListAutonomousDatabaseBackupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListAutonomousDatabaseBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[autonomous_db_backup.AutonomousDatabaseBackup]: + for page in self.pages: + yield from page.autonomous_database_backups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py new file mode 100644 index 000000000000..91a06d71780e --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import OracleDatabaseTransport +from .rest import OracleDatabaseRestInterceptor, OracleDatabaseRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[OracleDatabaseTransport]] +_transport_registry["rest"] = OracleDatabaseRestTransport + +__all__ = ( + "OracleDatabaseTransport", + "OracleDatabaseRestTransport", + "OracleDatabaseRestInterceptor", +) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py new file mode 100644 index 000000000000..ced22db4e8d6 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py @@ -0,0 +1,731 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.oracledatabase_v1 import gapic_version as package_version +from google.cloud.oracledatabase_v1.types import ( + autonomous_database, + exadata_infra, + oracledatabase, + vm_cluster, +) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class OracleDatabaseTransport(abc.ABC): + """Abstract transport class for OracleDatabase.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "oracledatabase.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'oracledatabase.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_cloud_exadata_infrastructures: gapic_v1.method.wrap_method( + self.list_cloud_exadata_infrastructures, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_cloud_exadata_infrastructure: gapic_v1.method.wrap_method( + self.get_cloud_exadata_infrastructure, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_cloud_exadata_infrastructure: gapic_v1.method.wrap_method( + self.create_cloud_exadata_infrastructure, + default_timeout=None, + client_info=client_info, + ), + self.delete_cloud_exadata_infrastructure: gapic_v1.method.wrap_method( + self.delete_cloud_exadata_infrastructure, + default_timeout=None, + client_info=client_info, + ), + self.list_cloud_vm_clusters: gapic_v1.method.wrap_method( + self.list_cloud_vm_clusters, + default_retry=retries.Retry( + 
initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_cloud_vm_cluster: gapic_v1.method.wrap_method( + self.get_cloud_vm_cluster, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_cloud_vm_cluster: gapic_v1.method.wrap_method( + self.create_cloud_vm_cluster, + default_timeout=None, + client_info=client_info, + ), + self.delete_cloud_vm_cluster: gapic_v1.method.wrap_method( + self.delete_cloud_vm_cluster, + default_timeout=None, + client_info=client_info, + ), + self.list_entitlements: gapic_v1.method.wrap_method( + self.list_entitlements, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_db_servers: gapic_v1.method.wrap_method( + self.list_db_servers, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_db_nodes: gapic_v1.method.wrap_method( + self.list_db_nodes, + 
default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_gi_versions: gapic_v1.method.wrap_method( + self.list_gi_versions, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_db_system_shapes: gapic_v1.method.wrap_method( + self.list_db_system_shapes, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_autonomous_databases: gapic_v1.method.wrap_method( + self.list_autonomous_databases, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_autonomous_database: gapic_v1.method.wrap_method( + self.get_autonomous_database, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + 
core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_autonomous_database: gapic_v1.method.wrap_method( + self.create_autonomous_database, + default_timeout=None, + client_info=client_info, + ), + self.delete_autonomous_database: gapic_v1.method.wrap_method( + self.delete_autonomous_database, + default_timeout=None, + client_info=client_info, + ), + self.restore_autonomous_database: gapic_v1.method.wrap_method( + self.restore_autonomous_database, + default_timeout=None, + client_info=client_info, + ), + self.generate_autonomous_database_wallet: gapic_v1.method.wrap_method( + self.generate_autonomous_database_wallet, + default_timeout=None, + client_info=client_info, + ), + self.list_autonomous_db_versions: gapic_v1.method.wrap_method( + self.list_autonomous_db_versions, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_autonomous_database_character_sets: gapic_v1.method.wrap_method( + self.list_autonomous_database_character_sets, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_autonomous_database_backups: gapic_v1.method.wrap_method( + self.list_autonomous_database_backups, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + 
core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_cloud_exadata_infrastructures( + self, + ) -> Callable[ + [oracledatabase.ListCloudExadataInfrastructuresRequest], + Union[ + oracledatabase.ListCloudExadataInfrastructuresResponse, + Awaitable[oracledatabase.ListCloudExadataInfrastructuresResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.GetCloudExadataInfrastructureRequest], + Union[ + exadata_infra.CloudExadataInfrastructure, + Awaitable[exadata_infra.CloudExadataInfrastructure], + ], + ]: + raise NotImplementedError() + + @property + def create_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.CreateCloudExadataInfrastructureRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.DeleteCloudExadataInfrastructureRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_cloud_vm_clusters( + self, + ) -> Callable[ + [oracledatabase.ListCloudVmClustersRequest], + Union[ + oracledatabase.ListCloudVmClustersResponse, + Awaitable[oracledatabase.ListCloudVmClustersResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_cloud_vm_cluster( + self, + ) -> Callable[ + 
[oracledatabase.GetCloudVmClusterRequest], + Union[vm_cluster.CloudVmCluster, Awaitable[vm_cluster.CloudVmCluster]], + ]: + raise NotImplementedError() + + @property + def create_cloud_vm_cluster( + self, + ) -> Callable[ + [oracledatabase.CreateCloudVmClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_cloud_vm_cluster( + self, + ) -> Callable[ + [oracledatabase.DeleteCloudVmClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_entitlements( + self, + ) -> Callable[ + [oracledatabase.ListEntitlementsRequest], + Union[ + oracledatabase.ListEntitlementsResponse, + Awaitable[oracledatabase.ListEntitlementsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_db_servers( + self, + ) -> Callable[ + [oracledatabase.ListDbServersRequest], + Union[ + oracledatabase.ListDbServersResponse, + Awaitable[oracledatabase.ListDbServersResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_db_nodes( + self, + ) -> Callable[ + [oracledatabase.ListDbNodesRequest], + Union[ + oracledatabase.ListDbNodesResponse, + Awaitable[oracledatabase.ListDbNodesResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_gi_versions( + self, + ) -> Callable[ + [oracledatabase.ListGiVersionsRequest], + Union[ + oracledatabase.ListGiVersionsResponse, + Awaitable[oracledatabase.ListGiVersionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_db_system_shapes( + self, + ) -> Callable[ + [oracledatabase.ListDbSystemShapesRequest], + Union[ + oracledatabase.ListDbSystemShapesResponse, + Awaitable[oracledatabase.ListDbSystemShapesResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_autonomous_databases( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabasesRequest], + Union[ + 
oracledatabase.ListAutonomousDatabasesResponse, + Awaitable[oracledatabase.ListAutonomousDatabasesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.GetAutonomousDatabaseRequest], + Union[ + autonomous_database.AutonomousDatabase, + Awaitable[autonomous_database.AutonomousDatabase], + ], + ]: + raise NotImplementedError() + + @property + def create_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.CreateAutonomousDatabaseRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.DeleteAutonomousDatabaseRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def restore_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.RestoreAutonomousDatabaseRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def generate_autonomous_database_wallet( + self, + ) -> Callable[ + [oracledatabase.GenerateAutonomousDatabaseWalletRequest], + Union[ + oracledatabase.GenerateAutonomousDatabaseWalletResponse, + Awaitable[oracledatabase.GenerateAutonomousDatabaseWalletResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_autonomous_db_versions( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDbVersionsRequest], + Union[ + oracledatabase.ListAutonomousDbVersionsResponse, + Awaitable[oracledatabase.ListAutonomousDbVersionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_autonomous_database_character_sets( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabaseCharacterSetsRequest], + Union[ + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse, + 
Awaitable[oracledatabase.ListAutonomousDatabaseCharacterSetsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_autonomous_database_backups( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabaseBackupsRequest], + Union[ + oracledatabase.ListAutonomousDatabaseBackupsResponse, + Awaitable[oracledatabase.ListAutonomousDatabaseBackupsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("OracleDatabaseTransport",) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py new file mode 100644 index 000000000000..ad8d2e4a9c29 
--- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py @@ -0,0 +1,3718 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.oracledatabase_v1.types import ( + autonomous_database, + exadata_infra, + oracledatabase, + vm_cluster, +) + +from .base import DEFAULT_CLIENT_INFO as 
BASE_DEFAULT_CLIENT_INFO +from .base import OracleDatabaseTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class OracleDatabaseRestInterceptor: + """Interceptor for OracleDatabase. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the OracleDatabaseRestTransport. + + .. code-block:: python + class MyCustomOracleDatabaseInterceptor(OracleDatabaseRestInterceptor): + def pre_create_autonomous_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_autonomous_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_cloud_exadata_infrastructure(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_cloud_exadata_infrastructure(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_cloud_vm_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_cloud_vm_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_autonomous_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_autonomous_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_cloud_exadata_infrastructure(self, request, metadata): + logging.log(f"Received request: {request}") + 
return request, metadata + + def post_delete_cloud_exadata_infrastructure(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_cloud_vm_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_cloud_vm_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_generate_autonomous_database_wallet(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_autonomous_database_wallet(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_autonomous_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_autonomous_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_cloud_exadata_infrastructure(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_cloud_exadata_infrastructure(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_cloud_vm_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_cloud_vm_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_autonomous_database_backups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_autonomous_database_backups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_autonomous_database_character_sets(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_autonomous_database_character_sets(self, response): + logging.log(f"Received response: 
{response}") + return response + + def pre_list_autonomous_databases(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_autonomous_databases(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_autonomous_db_versions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_autonomous_db_versions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_cloud_exadata_infrastructures(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_cloud_exadata_infrastructures(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_cloud_vm_clusters(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_cloud_vm_clusters(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_db_nodes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_db_nodes(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_db_servers(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_db_servers(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_db_system_shapes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_db_system_shapes(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_entitlements(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_entitlements(self, response): + 
logging.log(f"Received response: {response}") + return response + + def pre_list_gi_versions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_gi_versions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_restore_autonomous_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restore_autonomous_database(self, response): + logging.log(f"Received response: {response}") + return response + + transport = OracleDatabaseRestTransport(interceptor=MyCustomOracleDatabaseInterceptor()) + client = OracleDatabaseClient(transport=transport) + + + """ + + def pre_create_autonomous_database( + self, + request: oracledatabase.CreateAutonomousDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.CreateAutonomousDatabaseRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_autonomous_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_create_autonomous_database( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_autonomous_database + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_create_cloud_exadata_infrastructure( + self, + request: oracledatabase.CreateCloudExadataInfrastructureRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.CreateCloudExadataInfrastructureRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for create_cloud_exadata_infrastructure + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. 
+ """ + return request, metadata + + def post_create_cloud_exadata_infrastructure( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_cloud_exadata_infrastructure + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_create_cloud_vm_cluster( + self, + request: oracledatabase.CreateCloudVmClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.CreateCloudVmClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_cloud_vm_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_create_cloud_vm_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_cloud_vm_cluster + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_delete_autonomous_database( + self, + request: oracledatabase.DeleteAutonomousDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.DeleteAutonomousDatabaseRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_autonomous_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_delete_autonomous_database( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_autonomous_database + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_cloud_exadata_infrastructure( + self, + request: oracledatabase.DeleteCloudExadataInfrastructureRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.DeleteCloudExadataInfrastructureRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for delete_cloud_exadata_infrastructure + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_delete_cloud_exadata_infrastructure( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_cloud_exadata_infrastructure + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_delete_cloud_vm_cluster( + self, + request: oracledatabase.DeleteCloudVmClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.DeleteCloudVmClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_cloud_vm_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_delete_cloud_vm_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_cloud_vm_cluster + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + + def pre_generate_autonomous_database_wallet( + self, + request: oracledatabase.GenerateAutonomousDatabaseWalletRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.GenerateAutonomousDatabaseWalletRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for generate_autonomous_database_wallet + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_generate_autonomous_database_wallet( + self, response: oracledatabase.GenerateAutonomousDatabaseWalletResponse + ) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: + """Post-rpc interceptor for generate_autonomous_database_wallet + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_get_autonomous_database( + self, + request: oracledatabase.GetAutonomousDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.GetAutonomousDatabaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_autonomous_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_get_autonomous_database( + self, response: autonomous_database.AutonomousDatabase + ) -> autonomous_database.AutonomousDatabase: + """Post-rpc interceptor for get_autonomous_database + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + + def pre_get_cloud_exadata_infrastructure( + self, + request: oracledatabase.GetCloudExadataInfrastructureRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.GetCloudExadataInfrastructureRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_cloud_exadata_infrastructure + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_get_cloud_exadata_infrastructure( + self, response: exadata_infra.CloudExadataInfrastructure + ) -> exadata_infra.CloudExadataInfrastructure: + """Post-rpc interceptor for get_cloud_exadata_infrastructure + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_get_cloud_vm_cluster( + self, + request: oracledatabase.GetCloudVmClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.GetCloudVmClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_cloud_vm_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_get_cloud_vm_cluster( + self, response: vm_cluster.CloudVmCluster + ) -> vm_cluster.CloudVmCluster: + """Post-rpc interceptor for get_cloud_vm_cluster + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + + def pre_list_autonomous_database_backups( + self, + request: oracledatabase.ListAutonomousDatabaseBackupsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListAutonomousDatabaseBackupsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_autonomous_database_backups + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_autonomous_database_backups( + self, response: oracledatabase.ListAutonomousDatabaseBackupsResponse + ) -> oracledatabase.ListAutonomousDatabaseBackupsResponse: + """Post-rpc interceptor for list_autonomous_database_backups + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_autonomous_database_character_sets( + self, + request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_autonomous_database_character_sets + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_autonomous_database_character_sets( + self, response: oracledatabase.ListAutonomousDatabaseCharacterSetsResponse + ) -> oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: + """Post-rpc interceptor for list_autonomous_database_character_sets + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + + def pre_list_autonomous_databases( + self, + request: oracledatabase.ListAutonomousDatabasesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListAutonomousDatabasesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_autonomous_databases + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_autonomous_databases( + self, response: oracledatabase.ListAutonomousDatabasesResponse + ) -> oracledatabase.ListAutonomousDatabasesResponse: + """Post-rpc interceptor for list_autonomous_databases + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_autonomous_db_versions( + self, + request: oracledatabase.ListAutonomousDbVersionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListAutonomousDbVersionsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_autonomous_db_versions + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_autonomous_db_versions( + self, response: oracledatabase.ListAutonomousDbVersionsResponse + ) -> oracledatabase.ListAutonomousDbVersionsResponse: + """Post-rpc interceptor for list_autonomous_db_versions + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + + def pre_list_cloud_exadata_infrastructures( + self, + request: oracledatabase.ListCloudExadataInfrastructuresRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListCloudExadataInfrastructuresRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_cloud_exadata_infrastructures + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_cloud_exadata_infrastructures( + self, response: oracledatabase.ListCloudExadataInfrastructuresResponse + ) -> oracledatabase.ListCloudExadataInfrastructuresResponse: + """Post-rpc interceptor for list_cloud_exadata_infrastructures + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_cloud_vm_clusters( + self, + request: oracledatabase.ListCloudVmClustersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListCloudVmClustersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_cloud_vm_clusters + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_cloud_vm_clusters( + self, response: oracledatabase.ListCloudVmClustersResponse + ) -> oracledatabase.ListCloudVmClustersResponse: + """Post-rpc interceptor for list_cloud_vm_clusters + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + + def pre_list_db_nodes( + self, + request: oracledatabase.ListDbNodesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListDbNodesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_db_nodes + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_db_nodes( + self, response: oracledatabase.ListDbNodesResponse + ) -> oracledatabase.ListDbNodesResponse: + """Post-rpc interceptor for list_db_nodes + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_db_servers( + self, + request: oracledatabase.ListDbServersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListDbServersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_db_servers + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_db_servers( + self, response: oracledatabase.ListDbServersResponse + ) -> oracledatabase.ListDbServersResponse: + """Post-rpc interceptor for list_db_servers + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_db_system_shapes( + self, + request: oracledatabase.ListDbSystemShapesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListDbSystemShapesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_db_system_shapes + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. 
+ """ + return request, metadata + + def post_list_db_system_shapes( + self, response: oracledatabase.ListDbSystemShapesResponse + ) -> oracledatabase.ListDbSystemShapesResponse: + """Post-rpc interceptor for list_db_system_shapes + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_entitlements( + self, + request: oracledatabase.ListEntitlementsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListEntitlementsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_entitlements + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_entitlements( + self, response: oracledatabase.ListEntitlementsResponse + ) -> oracledatabase.ListEntitlementsResponse: + """Post-rpc interceptor for list_entitlements + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_gi_versions( + self, + request: oracledatabase.ListGiVersionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListGiVersionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_gi_versions + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_gi_versions( + self, response: oracledatabase.ListGiVersionsResponse + ) -> oracledatabase.ListGiVersionsResponse: + """Post-rpc interceptor for list_gi_versions + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + + def pre_restore_autonomous_database( + self, + request: oracledatabase.RestoreAutonomousDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.RestoreAutonomousDatabaseRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for restore_autonomous_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_restore_autonomous_database( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for restore_autonomous_database + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. 
+ """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class OracleDatabaseRestStub: + _session: AuthorizedSession + _host: str + _interceptor: OracleDatabaseRestInterceptor + + +class OracleDatabaseRestTransport(OracleDatabaseTransport): + """REST backend transport for OracleDatabase. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "oracledatabase.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[OracleDatabaseRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'oracledatabase.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or OracleDatabaseRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
        if self._operations_client is None:
            # HTTP bindings for the generic google.longrunning.Operations
            # service, scoped under this API's /v1 path prefix.
            http_options: Dict[str, List[Dict[str, str]]] = {
                "google.longrunning.Operations.CancelOperation": [
                    {
                        "method": "post",
                        "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel",
                        "body": "*",
                    },
                ],
                "google.longrunning.Operations.DeleteOperation": [
                    {
                        "method": "delete",
                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
                    },
                ],
                "google.longrunning.Operations.GetOperation": [
                    {
                        "method": "get",
                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
                    },
                ],
                "google.longrunning.Operations.ListOperations": [
                    {
                        "method": "get",
                        "uri": "/v1/{name=projects/*/locations/*}/operations",
                    },
                ],
            }

            rest_transport = operations_v1.OperationsRestTransport(
                host=self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                scopes=self._scopes,
                http_options=http_options,
                path_prefix="v1",
            )

            self._operations_client = operations_v1.AbstractOperationsClient(
                transport=rest_transport
            )

        # Return the client from cache.
        return self._operations_client

    # REST stub for the CreateAutonomousDatabase RPC (long-running operation).
    class _CreateAutonomousDatabase(OracleDatabaseRestStub):
        def __hash__(self):
            return hash("CreateAutonomousDatabase")

        # Required query-string fields that are filled with default values
        # when the caller leaves them unset (see _get_unset_required_fields).
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
            "autonomousDatabaseId": "",
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {
                k: v
                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
                if k not in message_dict
            }

        def __call__(
            self,
            request: oracledatabase.CreateAutonomousDatabaseRequest,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Optional[float] = None,
            metadata: Sequence[Tuple[str, str]] = (),
        ) -> operations_pb2.Operation:
            r"""Call the create autonomous
            database method over HTTP.

            Args:
                request (~.oracledatabase.CreateAutonomousDatabaseRequest):
                    The request object. The request for ``AutonomousDatabase.Create``.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.operations_pb2.Operation:
                    This resource represents a
                long-running operation that is the
                result of a network API call.

            """

            http_options: List[Dict[str, str]] = [
                {
                    "method": "post",
                    "uri": "/v1/{parent=projects/*/locations/*}/autonomousDatabases",
                    "body": "autonomous_database",
                },
            ]
            request, metadata = self._interceptor.pre_create_autonomous_database(
                request, metadata
            )
            pb_request = oracledatabase.CreateAutonomousDatabaseRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request["body"], use_integers_for_enums=True
            )
            uri = transcoded_request["uri"]
            method = transcoded_request["method"]

            # Jsonify the query params
            query_params = json.loads(
                json_format.MessageToJson(
                    transcoded_request["query_params"],
                    use_integers_for_enums=True,
                )
            )
            query_params.update(self._get_unset_required_fields(query_params))

            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers["Content-Type"] = "application/json"
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = operations_pb2.Operation()
            json_format.Parse(response.content, resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_create_autonomous_database(resp)
            return resp

    # REST stub for the CreateCloudExadataInfrastructure RPC (long-running operation).
    class _CreateCloudExadataInfrastructure(OracleDatabaseRestStub):
        def __hash__(self):
            return hash("CreateCloudExadataInfrastructure")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
            "cloudExadataInfrastructureId": "",
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {
                k: v
                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
                if k not in message_dict
            }

        def __call__(
            self,
            request: oracledatabase.CreateCloudExadataInfrastructureRequest,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Optional[float] = None,
            metadata: Sequence[Tuple[str, str]] = (),
        ) -> operations_pb2.Operation:
            r"""Call the create cloud exadata
            infrastructure method over HTTP.

            Args:
                request (~.oracledatabase.CreateCloudExadataInfrastructureRequest):
                    The request object. The request for ``CloudExadataInfrastructure.Create``.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.operations_pb2.Operation:
                    This resource represents a
                long-running operation that is the
                result of a network API call.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures", + "body": "cloud_exadata_infrastructure", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_create_cloud_exadata_infrastructure( + request, metadata + ) + pb_request = oracledatabase.CreateCloudExadataInfrastructureRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_cloud_exadata_infrastructure(resp) + return resp + + class _CreateCloudVmCluster(OracleDatabaseRestStub): + def __hash__(self): + return hash("CreateCloudVmCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "cloudVmClusterId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.CreateCloudVmClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create cloud vm cluster method over HTTP. + + Args: + request (~.oracledatabase.CreateCloudVmClusterRequest): + The request object. The request for ``CloudVmCluster.Create``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/cloudVmClusters", + "body": "cloud_vm_cluster", + }, + ] + request, metadata = self._interceptor.pre_create_cloud_vm_cluster( + request, metadata + ) + pb_request = oracledatabase.CreateCloudVmClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_cloud_vm_cluster(resp) + return resp + + class _DeleteAutonomousDatabase(OracleDatabaseRestStub): + def __hash__(self): + return hash("DeleteAutonomousDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.DeleteAutonomousDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete autonomous + database method over HTTP. + + Args: + request (~.oracledatabase.DeleteAutonomousDatabaseRequest): + The request object. The request for ``AutonomousDatabase.Delete``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/autonomousDatabases/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_autonomous_database( + request, metadata + ) + pb_request = oracledatabase.DeleteAutonomousDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_autonomous_database(resp) + return resp + + class _DeleteCloudExadataInfrastructure(OracleDatabaseRestStub): + def __hash__(self): + return hash("DeleteCloudExadataInfrastructure") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.DeleteCloudExadataInfrastructureRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete cloud exadata + infrastructure method over HTTP. + + Args: + request (~.oracledatabase.DeleteCloudExadataInfrastructureRequest): + The request object. The request for ``CloudExadataInfrastructure.Delete``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_delete_cloud_exadata_infrastructure( + request, metadata + ) + pb_request = oracledatabase.DeleteCloudExadataInfrastructureRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_cloud_exadata_infrastructure(resp) + return resp + + class _DeleteCloudVmCluster(OracleDatabaseRestStub): + def __hash__(self): + return hash("DeleteCloudVmCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.DeleteCloudVmClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete cloud vm cluster method over HTTP. + + Args: + request (~.oracledatabase.DeleteCloudVmClusterRequest): + The request object. The request for ``CloudVmCluster.Delete``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/cloudVmClusters/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_cloud_vm_cluster( + request, metadata + ) + pb_request = oracledatabase.DeleteCloudVmClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_cloud_vm_cluster(resp) + return resp + + class _GenerateAutonomousDatabaseWallet(OracleDatabaseRestStub): + def __hash__(self): + return hash("GenerateAutonomousDatabaseWallet") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.GenerateAutonomousDatabaseWalletRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: + r"""Call the generate autonomous + database wallet method over HTTP. + + Args: + request (~.oracledatabase.GenerateAutonomousDatabaseWalletRequest): + The request object. The request for ``AutonomousDatabase.GenerateWallet``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.GenerateAutonomousDatabaseWalletResponse: + The response for ``AutonomousDatabase.GenerateWallet``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/autonomousDatabases/*}:generateWallet", + "body": "*", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_generate_autonomous_database_wallet( + request, metadata + ) + pb_request = oracledatabase.GenerateAutonomousDatabaseWalletRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.GenerateAutonomousDatabaseWalletResponse() + pb_resp = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_autonomous_database_wallet(resp) + return resp + + class _GetAutonomousDatabase(OracleDatabaseRestStub): + def __hash__(self): + return hash("GetAutonomousDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.GetAutonomousDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autonomous_database.AutonomousDatabase: + r"""Call the get autonomous database method over HTTP. + + Args: + request (~.oracledatabase.GetAutonomousDatabaseRequest): + The request object. The request for ``AutonomousDatabase.Get``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.autonomous_database.AutonomousDatabase: + Details of the Autonomous Database + resource. 
                https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabase/

            """

            http_options: List[Dict[str, str]] = [
                {
                    "method": "get",
                    "uri": "/v1/{name=projects/*/locations/*/autonomousDatabases/*}",
                },
            ]
            request, metadata = self._interceptor.pre_get_autonomous_database(
                request, metadata
            )
            pb_request = oracledatabase.GetAutonomousDatabaseRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request["uri"]
            method = transcoded_request["method"]

            # Jsonify the query params
            query_params = json.loads(
                json_format.MessageToJson(
                    transcoded_request["query_params"],
                    use_integers_for_enums=True,
                )
            )
            query_params.update(self._get_unset_required_fields(query_params))

            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            # GET carries no request body, so no `data=` is passed.
            headers = dict(metadata)
            headers["Content-Type"] = "application/json"
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = autonomous_database.AutonomousDatabase()
            pb_resp = autonomous_database.AutonomousDatabase.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get_autonomous_database(resp)
            return resp

    # REST stub for the GetCloudExadataInfrastructure RPC (unary response).
    class _GetCloudExadataInfrastructure(OracleDatabaseRestStub):
        def __hash__(self):
            return hash("GetCloudExadataInfrastructure")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {
                k: v
                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
                if k not in message_dict
            }

        def __call__(
            self,
            request: oracledatabase.GetCloudExadataInfrastructureRequest,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Optional[float] = None,
            metadata: Sequence[Tuple[str, str]] = (),
        ) -> exadata_infra.CloudExadataInfrastructure:
            r"""Call the get cloud exadata
            infrastructure method over HTTP.

            Args:
                request (~.oracledatabase.GetCloudExadataInfrastructureRequest):
                    The request object. The request for ``CloudExadataInfrastructure.Get``.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.exadata_infra.CloudExadataInfrastructure:
                    Represents CloudExadataInfrastructure
                resource.
                https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudExadataInfrastructure/

            """

            http_options: List[Dict[str, str]] = [
                {
                    "method": "get",
                    "uri": "/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}",
                },
            ]
            request, metadata = self._interceptor.pre_get_cloud_exadata_infrastructure(
                request, metadata
            )
            pb_request = oracledatabase.GetCloudExadataInfrastructureRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request["uri"]
            method = transcoded_request["method"]

            # Jsonify the query params
            query_params = json.loads(
                json_format.MessageToJson(
                    transcoded_request["query_params"],
                    use_integers_for_enums=True,
                )
            )
            query_params.update(self._get_unset_required_fields(query_params))

            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers["Content-Type"] = "application/json"
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = exadata_infra.CloudExadataInfrastructure() + pb_resp = exadata_infra.CloudExadataInfrastructure.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_cloud_exadata_infrastructure(resp) + return resp + + class _GetCloudVmCluster(OracleDatabaseRestStub): + def __hash__(self): + return hash("GetCloudVmCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.GetCloudVmClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vm_cluster.CloudVmCluster: + r"""Call the get cloud vm cluster method over HTTP. + + Args: + request (~.oracledatabase.GetCloudVmClusterRequest): + The request object. The request for ``CloudVmCluster.Get``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vm_cluster.CloudVmCluster: + Details of the Cloud VM Cluster + resource. 
+ https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudVmCluster/ + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/cloudVmClusters/*}", + }, + ] + request, metadata = self._interceptor.pre_get_cloud_vm_cluster( + request, metadata + ) + pb_request = oracledatabase.GetCloudVmClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vm_cluster.CloudVmCluster() + pb_resp = vm_cluster.CloudVmCluster.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_cloud_vm_cluster(resp) + return resp + + class _ListAutonomousDatabaseBackups(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListAutonomousDatabaseBackups") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListAutonomousDatabaseBackupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListAutonomousDatabaseBackupsResponse: + r"""Call the list autonomous database + backups method over HTTP. + + Args: + request (~.oracledatabase.ListAutonomousDatabaseBackupsRequest): + The request object. The request for ``AutonomousDatabaseBackup.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListAutonomousDatabaseBackupsResponse: + The response for ``AutonomousDatabaseBackup.List``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/autonomousDatabaseBackups", + }, + ] + request, metadata = self._interceptor.pre_list_autonomous_database_backups( + request, metadata + ) + pb_request = oracledatabase.ListAutonomousDatabaseBackupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListAutonomousDatabaseBackupsResponse() + pb_resp = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_autonomous_database_backups(resp) + return resp + + class _ListAutonomousDatabaseCharacterSets(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListAutonomousDatabaseCharacterSets") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: + r"""Call the list autonomous database + character sets method over HTTP. + + Args: + request (~.oracledatabase.ListAutonomousDatabaseCharacterSetsRequest): + The request object. The request for ``AutonomousDatabaseCharacterSet.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: + The response for + ``AutonomousDatabaseCharacterSet.List``. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/autonomousDatabaseCharacterSets", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_list_autonomous_database_character_sets( + request, metadata + ) + pb_request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() + pb_resp = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_autonomous_database_character_sets(resp) + return resp + + class _ListAutonomousDatabases(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListAutonomousDatabases") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListAutonomousDatabasesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListAutonomousDatabasesResponse: + r"""Call the list autonomous databases method over HTTP. + + Args: + request (~.oracledatabase.ListAutonomousDatabasesRequest): + The request object. The request for ``AutonomousDatabase.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListAutonomousDatabasesResponse: + The response for ``AutonomousDatabase.List``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/autonomousDatabases", + }, + ] + request, metadata = self._interceptor.pre_list_autonomous_databases( + request, metadata + ) + pb_request = oracledatabase.ListAutonomousDatabasesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListAutonomousDatabasesResponse() + pb_resp = oracledatabase.ListAutonomousDatabasesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_autonomous_databases(resp) + return resp + + class _ListAutonomousDbVersions(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListAutonomousDbVersions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListAutonomousDbVersionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListAutonomousDbVersionsResponse: + r"""Call the list autonomous db + versions method over HTTP. + + Args: + request (~.oracledatabase.ListAutonomousDbVersionsRequest): + The request object. The request for ``AutonomousDbVersion.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListAutonomousDbVersionsResponse: + The response for ``AutonomousDbVersion.List``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/autonomousDbVersions", + }, + ] + request, metadata = self._interceptor.pre_list_autonomous_db_versions( + request, metadata + ) + pb_request = oracledatabase.ListAutonomousDbVersionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListAutonomousDbVersionsResponse() + pb_resp = oracledatabase.ListAutonomousDbVersionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_autonomous_db_versions(resp) + return resp + + class _ListCloudExadataInfrastructures(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListCloudExadataInfrastructures") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListCloudExadataInfrastructuresRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListCloudExadataInfrastructuresResponse: + r"""Call the list cloud exadata + infrastructures method over HTTP. + + Args: + request (~.oracledatabase.ListCloudExadataInfrastructuresRequest): + The request object. The request for ``CloudExadataInfrastructures.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListCloudExadataInfrastructuresResponse: + The response for ``CloudExadataInfrastructures.list``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_list_cloud_exadata_infrastructures( + request, metadata + ) + pb_request = oracledatabase.ListCloudExadataInfrastructuresRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListCloudExadataInfrastructuresResponse() + pb_resp = oracledatabase.ListCloudExadataInfrastructuresResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_cloud_exadata_infrastructures(resp) + return resp + + class _ListCloudVmClusters(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListCloudVmClusters") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListCloudVmClustersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListCloudVmClustersResponse: + r"""Call the list cloud vm clusters method over HTTP. + + Args: + request (~.oracledatabase.ListCloudVmClustersRequest): + The request object. The request for ``CloudVmCluster.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListCloudVmClustersResponse: + The response for ``CloudVmCluster.List``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/cloudVmClusters", + }, + ] + request, metadata = self._interceptor.pre_list_cloud_vm_clusters( + request, metadata + ) + pb_request = oracledatabase.ListCloudVmClustersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListCloudVmClustersResponse() + pb_resp = oracledatabase.ListCloudVmClustersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_cloud_vm_clusters(resp) + return resp + + class _ListDbNodes(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListDbNodes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListDbNodesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListDbNodesResponse: + r"""Call the list db nodes method over HTTP. + + Args: + request (~.oracledatabase.ListDbNodesRequest): + The request object. The request for ``DbNode.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListDbNodesResponse: + The response for ``DbNode.List``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/cloudVmClusters/*}/dbNodes", + }, + ] + request, metadata = self._interceptor.pre_list_db_nodes(request, metadata) + pb_request = oracledatabase.ListDbNodesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListDbNodesResponse() + pb_resp = oracledatabase.ListDbNodesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_db_nodes(resp) + return resp + + class _ListDbServers(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListDbServers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListDbServersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListDbServersResponse: + r"""Call the list db servers method over HTTP. + + Args: + request (~.oracledatabase.ListDbServersRequest): + The request object. The request for ``DbServer.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListDbServersResponse: + The response for ``DbServer.List``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/cloudExadataInfrastructures/*}/dbServers", + }, + ] + request, metadata = self._interceptor.pre_list_db_servers(request, metadata) + pb_request = oracledatabase.ListDbServersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListDbServersResponse() + pb_resp = oracledatabase.ListDbServersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_db_servers(resp) + return resp + + class _ListDbSystemShapes(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListDbSystemShapes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListDbSystemShapesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListDbSystemShapesResponse: + r"""Call the list db system shapes method over HTTP. + + Args: + request (~.oracledatabase.ListDbSystemShapesRequest): + The request object. The request for ``DbSystemShape.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListDbSystemShapesResponse: + The response for ``DbSystemShape.List``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/dbSystemShapes", + }, + ] + request, metadata = self._interceptor.pre_list_db_system_shapes( + request, metadata + ) + pb_request = oracledatabase.ListDbSystemShapesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListDbSystemShapesResponse() + pb_resp = oracledatabase.ListDbSystemShapesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_db_system_shapes(resp) + return resp + + class _ListEntitlements(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListEntitlements") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListEntitlementsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListEntitlementsResponse: + r"""Call the list entitlements method over HTTP. + + Args: + request (~.oracledatabase.ListEntitlementsRequest): + The request object. The request for ``Entitlement.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListEntitlementsResponse: + The response for ``Entitlement.List``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/entitlements", + }, + ] + request, metadata = self._interceptor.pre_list_entitlements( + request, metadata + ) + pb_request = oracledatabase.ListEntitlementsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListEntitlementsResponse() + pb_resp = oracledatabase.ListEntitlementsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_entitlements(resp) + return resp + + class _ListGiVersions(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListGiVersions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListGiVersionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListGiVersionsResponse: + r"""Call the list gi versions method over HTTP. + + Args: + request (~.oracledatabase.ListGiVersionsRequest): + The request object. The request for ``GiVersion.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListGiVersionsResponse: + The response for ``GiVersion.List``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/giVersions", + }, + ] + request, metadata = self._interceptor.pre_list_gi_versions( + request, metadata + ) + pb_request = oracledatabase.ListGiVersionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListGiVersionsResponse() + pb_resp = oracledatabase.ListGiVersionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_gi_versions(resp) + return resp + + class _RestoreAutonomousDatabase(OracleDatabaseRestStub): + def __hash__(self): + return hash("RestoreAutonomousDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.RestoreAutonomousDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the restore autonomous + database method over HTTP. + + Args: + request (~.oracledatabase.RestoreAutonomousDatabaseRequest): + The request object. The request for ``AutonomousDatabase.Restore``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/autonomousDatabases/*}:restore", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_restore_autonomous_database( + request, metadata + ) + pb_request = oracledatabase.RestoreAutonomousDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_restore_autonomous_database(resp) + return resp + + @property + def create_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.CreateAutonomousDatabaseRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.CreateCloudExadataInfrastructureRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_cloud_vm_cluster( + self, + ) -> Callable[ + [oracledatabase.CreateCloudVmClusterRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.DeleteAutonomousDatabaseRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.DeleteCloudExadataInfrastructureRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_cloud_vm_cluster( + self, + ) -> Callable[ + [oracledatabase.DeleteCloudVmClusterRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_autonomous_database_wallet( + self, + ) -> Callable[ + [oracledatabase.GenerateAutonomousDatabaseWalletRequest], + oracledatabase.GenerateAutonomousDatabaseWalletResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateAutonomousDatabaseWallet(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.GetAutonomousDatabaseRequest], + autonomous_database.AutonomousDatabase, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.GetCloudExadataInfrastructureRequest], + exadata_infra.CloudExadataInfrastructure, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_cloud_vm_cluster( + self, + ) -> Callable[[oracledatabase.GetCloudVmClusterRequest], vm_cluster.CloudVmCluster]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_autonomous_database_backups( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabaseBackupsRequest], + oracledatabase.ListAutonomousDatabaseBackupsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAutonomousDatabaseBackups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_autonomous_database_character_sets( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabaseCharacterSetsRequest], + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAutonomousDatabaseCharacterSets(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_autonomous_databases( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabasesRequest], + oracledatabase.ListAutonomousDatabasesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListAutonomousDatabases(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_autonomous_db_versions( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDbVersionsRequest], + oracledatabase.ListAutonomousDbVersionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAutonomousDbVersions(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_cloud_exadata_infrastructures( + self, + ) -> Callable[ + [oracledatabase.ListCloudExadataInfrastructuresRequest], + oracledatabase.ListCloudExadataInfrastructuresResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListCloudExadataInfrastructures(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_cloud_vm_clusters( + self, + ) -> Callable[ + [oracledatabase.ListCloudVmClustersRequest], + oracledatabase.ListCloudVmClustersResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListCloudVmClusters(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_db_nodes( + self, + ) -> Callable[ + [oracledatabase.ListDbNodesRequest], oracledatabase.ListDbNodesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListDbNodes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_db_servers( + self, + ) -> Callable[ + [oracledatabase.ListDbServersRequest], oracledatabase.ListDbServersResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDbServers(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_db_system_shapes( + self, + ) -> Callable[ + [oracledatabase.ListDbSystemShapesRequest], + oracledatabase.ListDbSystemShapesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDbSystemShapes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_entitlements( + self, + ) -> Callable[ + [oracledatabase.ListEntitlementsRequest], + oracledatabase.ListEntitlementsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEntitlements(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_gi_versions( + self, + ) -> Callable[ + [oracledatabase.ListGiVersionsRequest], oracledatabase.ListGiVersionsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListGiVersions(self._session, self._host, self._interceptor) # type: ignore + + @property + def restore_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.RestoreAutonomousDatabaseRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._RestoreAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(OracleDatabaseRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(OracleDatabaseRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(OracleDatabaseRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(OracleDatabaseRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(OracleDatabaseRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(OracleDatabaseRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("OracleDatabaseRestTransport",) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/__init__.py new file mode 100644 index 000000000000..e5079e7c48c9 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/__init__.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .autonomous_database import ( + AllConnectionStrings, + AutonomousDatabase, + AutonomousDatabaseApex, + AutonomousDatabaseConnectionStrings, + AutonomousDatabaseConnectionUrls, + AutonomousDatabaseProperties, + AutonomousDatabaseStandbySummary, + DatabaseConnectionStringProfile, + DBWorkload, + GenerateType, + OperationsInsightsState, + ScheduledOperationDetails, + State, +) +from .autonomous_database_character_set import AutonomousDatabaseCharacterSet +from .autonomous_db_backup import ( + AutonomousDatabaseBackup, + AutonomousDatabaseBackupProperties, +) +from .autonomous_db_version import AutonomousDbVersion +from .common import CustomerContact +from .db_node import DbNode, DbNodeProperties +from .db_server import DbServer, DbServerProperties +from .db_system_shape import DbSystemShape +from .entitlement import CloudAccountDetails, Entitlement +from .exadata_infra import ( + CloudExadataInfrastructure, + CloudExadataInfrastructureProperties, + MaintenanceWindow, +) +from .gi_version import GiVersion +from .location_metadata import LocationMetadata +from .oracledatabase import ( + CreateAutonomousDatabaseRequest, + CreateCloudExadataInfrastructureRequest, + CreateCloudVmClusterRequest, + DeleteAutonomousDatabaseRequest, + DeleteCloudExadataInfrastructureRequest, + DeleteCloudVmClusterRequest, + GenerateAutonomousDatabaseWalletRequest, + GenerateAutonomousDatabaseWalletResponse, + GetAutonomousDatabaseRequest, + GetCloudExadataInfrastructureRequest, + GetCloudVmClusterRequest, + ListAutonomousDatabaseBackupsRequest, + ListAutonomousDatabaseBackupsResponse, + ListAutonomousDatabaseCharacterSetsRequest, + ListAutonomousDatabaseCharacterSetsResponse, + ListAutonomousDatabasesRequest, + ListAutonomousDatabasesResponse, + ListAutonomousDbVersionsRequest, + ListAutonomousDbVersionsResponse, + ListCloudExadataInfrastructuresRequest, + ListCloudExadataInfrastructuresResponse, + ListCloudVmClustersRequest, + ListCloudVmClustersResponse, + ListDbNodesRequest, + 
ListDbNodesResponse, + ListDbServersRequest, + ListDbServersResponse, + ListDbSystemShapesRequest, + ListDbSystemShapesResponse, + ListEntitlementsRequest, + ListEntitlementsResponse, + ListGiVersionsRequest, + ListGiVersionsResponse, + OperationMetadata, + RestoreAutonomousDatabaseRequest, +) +from .vm_cluster import CloudVmCluster, CloudVmClusterProperties, DataCollectionOptions + +__all__ = ( + "AllConnectionStrings", + "AutonomousDatabase", + "AutonomousDatabaseApex", + "AutonomousDatabaseConnectionStrings", + "AutonomousDatabaseConnectionUrls", + "AutonomousDatabaseProperties", + "AutonomousDatabaseStandbySummary", + "DatabaseConnectionStringProfile", + "ScheduledOperationDetails", + "DBWorkload", + "GenerateType", + "OperationsInsightsState", + "State", + "AutonomousDatabaseCharacterSet", + "AutonomousDatabaseBackup", + "AutonomousDatabaseBackupProperties", + "AutonomousDbVersion", + "CustomerContact", + "DbNode", + "DbNodeProperties", + "DbServer", + "DbServerProperties", + "DbSystemShape", + "CloudAccountDetails", + "Entitlement", + "CloudExadataInfrastructure", + "CloudExadataInfrastructureProperties", + "MaintenanceWindow", + "GiVersion", + "LocationMetadata", + "CreateAutonomousDatabaseRequest", + "CreateCloudExadataInfrastructureRequest", + "CreateCloudVmClusterRequest", + "DeleteAutonomousDatabaseRequest", + "DeleteCloudExadataInfrastructureRequest", + "DeleteCloudVmClusterRequest", + "GenerateAutonomousDatabaseWalletRequest", + "GenerateAutonomousDatabaseWalletResponse", + "GetAutonomousDatabaseRequest", + "GetCloudExadataInfrastructureRequest", + "GetCloudVmClusterRequest", + "ListAutonomousDatabaseBackupsRequest", + "ListAutonomousDatabaseBackupsResponse", + "ListAutonomousDatabaseCharacterSetsRequest", + "ListAutonomousDatabaseCharacterSetsResponse", + "ListAutonomousDatabasesRequest", + "ListAutonomousDatabasesResponse", + "ListAutonomousDbVersionsRequest", + "ListAutonomousDbVersionsResponse", + "ListCloudExadataInfrastructuresRequest", + 
"ListCloudExadataInfrastructuresResponse", + "ListCloudVmClustersRequest", + "ListCloudVmClustersResponse", + "ListDbNodesRequest", + "ListDbNodesResponse", + "ListDbServersRequest", + "ListDbServersResponse", + "ListDbSystemShapesRequest", + "ListDbSystemShapesResponse", + "ListEntitlementsRequest", + "ListEntitlementsResponse", + "ListGiVersionsRequest", + "ListGiVersionsResponse", + "OperationMetadata", + "RestoreAutonomousDatabaseRequest", + "CloudVmCluster", + "CloudVmClusterProperties", + "DataCollectionOptions", +) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database.py new file mode 100644 index 000000000000..907ef93bc4d5 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database.py @@ -0,0 +1,1421 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.oracledatabase_v1.types import common + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "GenerateType", + "State", + "OperationsInsightsState", + "DBWorkload", + "AutonomousDatabase", + "AutonomousDatabaseProperties", + "AutonomousDatabaseApex", + "AutonomousDatabaseConnectionStrings", + "DatabaseConnectionStringProfile", + "AllConnectionStrings", + "AutonomousDatabaseConnectionUrls", + "AutonomousDatabaseStandbySummary", + "ScheduledOperationDetails", + }, +) + + +class GenerateType(proto.Enum): + r"""The type of wallet generation. + + Values: + GENERATE_TYPE_UNSPECIFIED (0): + Default unspecified value. + ALL (1): + Used to generate wallet for all databases in + the region. + SINGLE (2): + Used to generate wallet for a single + database. + """ + GENERATE_TYPE_UNSPECIFIED = 0 + ALL = 1 + SINGLE = 2 + + +class State(proto.Enum): + r"""The various lifecycle states of the Autonomous Database. + + Values: + STATE_UNSPECIFIED (0): + Default unspecified value. + PROVISIONING (1): + Indicates that the Autonomous Database is in + provisioning state. + AVAILABLE (2): + Indicates that the Autonomous Database is in + available state. + STOPPING (3): + Indicates that the Autonomous Database is in + stopping state. + STOPPED (4): + Indicates that the Autonomous Database is in + stopped state. + STARTING (5): + Indicates that the Autonomous Database is in + starting state. + TERMINATING (6): + Indicates that the Autonomous Database is in + terminating state. + TERMINATED (7): + Indicates that the Autonomous Database is in + terminated state. 
+ UNAVAILABLE (8): + Indicates that the Autonomous Database is in + unavailable state. + RESTORE_IN_PROGRESS (9): + Indicates that the Autonomous Database + restore is in progress. + RESTORE_FAILED (10): + Indicates that the Autonomous Database failed + to restore. + BACKUP_IN_PROGRESS (11): + Indicates that the Autonomous Database backup + is in progress. + SCALE_IN_PROGRESS (12): + Indicates that the Autonomous Database scale + is in progress. + AVAILABLE_NEEDS_ATTENTION (13): + Indicates that the Autonomous Database is + available but needs attention state. + UPDATING (14): + Indicates that the Autonomous Database is in + updating state. + MAINTENANCE_IN_PROGRESS (15): + Indicates that the Autonomous Database's + maintenance is in progress state. + RESTARTING (16): + Indicates that the Autonomous Database is in + restarting state. + RECREATING (17): + Indicates that the Autonomous Database is in + recreating state. + ROLE_CHANGE_IN_PROGRESS (18): + Indicates that the Autonomous Database's role + change is in progress state. + UPGRADING (19): + Indicates that the Autonomous Database is in + upgrading state. + INACCESSIBLE (20): + Indicates that the Autonomous Database is in + inaccessible state. + STANDBY (21): + Indicates that the Autonomous Database is in + standby state. + """ + STATE_UNSPECIFIED = 0 + PROVISIONING = 1 + AVAILABLE = 2 + STOPPING = 3 + STOPPED = 4 + STARTING = 5 + TERMINATING = 6 + TERMINATED = 7 + UNAVAILABLE = 8 + RESTORE_IN_PROGRESS = 9 + RESTORE_FAILED = 10 + BACKUP_IN_PROGRESS = 11 + SCALE_IN_PROGRESS = 12 + AVAILABLE_NEEDS_ATTENTION = 13 + UPDATING = 14 + MAINTENANCE_IN_PROGRESS = 15 + RESTARTING = 16 + RECREATING = 17 + ROLE_CHANGE_IN_PROGRESS = 18 + UPGRADING = 19 + INACCESSIBLE = 20 + STANDBY = 21 + + +class OperationsInsightsState(proto.Enum): + r"""The state of the Operations Insights for this Autonomous + Database. + + Values: + OPERATIONS_INSIGHTS_STATE_UNSPECIFIED (0): + Default unspecified value. 
+ ENABLING (1): + Enabling status for operation insights. + ENABLED (2): + Enabled status for operation insights. + DISABLING (3): + Disabling status for operation insights. + NOT_ENABLED (4): + Not Enabled status for operation insights. + FAILED_ENABLING (5): + Failed enabling status for operation + insights. + FAILED_DISABLING (6): + Failed disabling status for operation + insights. + """ + OPERATIONS_INSIGHTS_STATE_UNSPECIFIED = 0 + ENABLING = 1 + ENABLED = 2 + DISABLING = 3 + NOT_ENABLED = 4 + FAILED_ENABLING = 5 + FAILED_DISABLING = 6 + + +class DBWorkload(proto.Enum): + r"""The various states available for the Autonomous Database + workload type. + + Values: + DB_WORKLOAD_UNSPECIFIED (0): + Default unspecified value. + OLTP (1): + Autonomous Transaction Processing database. + DW (2): + Autonomous Data Warehouse database. + AJD (3): + Autonomous JSON Database. + APEX (4): + Autonomous Database with the Oracle APEX + Application Development workload type. + """ + DB_WORKLOAD_UNSPECIFIED = 0 + OLTP = 1 + DW = 2 + AJD = 3 + APEX = 4 + + +class AutonomousDatabase(proto.Message): + r"""Details of the Autonomous Database resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabase/ + + Attributes: + name (str): + Identifier. The name of the Autonomous Database resource in + the following format: + projects/{project}/locations/{region}/autonomousDatabases/{autonomous_database} + database (str): + Optional. The name of the Autonomous + Database. The database name must be unique in + the project. The name must begin with a letter + and can contain a maximum of 30 alphanumeric + characters. + display_name (str): + Optional. The display name for the Autonomous + Database. The name does not have to be unique + within your project. + entitlement_id (str): + Output only. The ID of the subscription + entitlement associated with the Autonomous + Database. + admin_password (str): + Optional. The password for the default ADMIN + user. 
+ properties (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties): + Optional. The properties of the Autonomous + Database. + labels (MutableMapping[str, str]): + Optional. The labels or tags associated with + the Autonomous Database. + network (str): + Required. The name of the VPC network used by + the Autonomous Database in the following format: + projects/{project}/global/networks/{network} + cidr (str): + Required. The subnet CIDR range for the + Autonmous Database. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time that the + Autonomous Database was created. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + database: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + entitlement_id: str = proto.Field( + proto.STRING, + number=5, + ) + admin_password: str = proto.Field( + proto.STRING, + number=6, + ) + properties: "AutonomousDatabaseProperties" = proto.Field( + proto.MESSAGE, + number=7, + message="AutonomousDatabaseProperties", + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) + network: str = proto.Field( + proto.STRING, + number=9, + ) + cidr: str = proto.Field( + proto.STRING, + number=10, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=13, + message=timestamp_pb2.Timestamp, + ) + + +class AutonomousDatabaseProperties(proto.Message): + r"""The properties of an Autonomous Database. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ocid (str): + Output only. OCID of the Autonomous Database. + https://docs.oracle.com/en-us/iaas/Content/General/Concepts/identifiers.htm#Oracle + compute_count (float): + Optional. The number of compute servers for + the Autonomous Database. + cpu_core_count (int): + Optional. 
The number of CPU cores to be made + available to the database. + data_storage_size_tb (int): + Optional. The size of the data stored in the + database, in terabytes. + data_storage_size_gb (int): + Optional. The size of the data stored in the + database, in gigabytes. + db_workload (google.cloud.oracledatabase_v1.types.DBWorkload): + Required. The workload type of the Autonomous + Database. + db_edition (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.DatabaseEdition): + Optional. The edition of the Autonomous + Databases. + character_set (str): + Optional. The character set for the + Autonomous Database. The default is AL32UTF8. + n_character_set (str): + Optional. The national character set for the + Autonomous Database. The default is AL16UTF16. + private_endpoint_ip (str): + Optional. The private endpoint IP address for + the Autonomous Database. + private_endpoint_label (str): + Optional. The private endpoint label for the + Autonomous Database. + db_version (str): + Optional. The Oracle Database version for the + Autonomous Database. + is_auto_scaling_enabled (bool): + Optional. This field indicates if auto + scaling is enabled for the Autonomous Database + CPU core count. + is_storage_auto_scaling_enabled (bool): + Optional. This field indicates if auto + scaling is enabled for the Autonomous Database + storage. + license_type (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.LicenseType): + Required. The license type used for the + Autonomous Database. + customer_contacts (MutableSequence[google.cloud.oracledatabase_v1.types.CustomerContact]): + Optional. The list of customer contacts. + secret_id (str): + Optional. The ID of the Oracle Cloud + Infrastructure vault secret. + vault_id (str): + Optional. The ID of the Oracle Cloud + Infrastructure vault. + maintenance_schedule_type (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.MaintenanceScheduleType): + Optional. 
The maintenance schedule of the + Autonomous Database. + mtls_connection_required (bool): + Optional. This field specifies if the + Autonomous Database requires mTLS connections. + backup_retention_period_days (int): + Optional. The retention period for the + Autonomous Database. This field is specified in + days, can range from 1 day to 60 days, and has a + default value of 60 days. + actual_used_data_storage_size_tb (float): + Output only. The amount of storage currently + being used for user and system data, in + terabytes. + allocated_storage_size_tb (float): + Output only. The amount of storage currently + allocated for the database tables and billed + for, rounded up in terabytes. + apex_details (google.cloud.oracledatabase_v1.types.AutonomousDatabaseApex): + Output only. The details for the Oracle APEX + Application Development. + are_primary_allowlisted_ips_used (bool): + Output only. This field indicates the status + of Data Guard and Access control for the + Autonomous Database. The field's value is null + if Data Guard is disabled or Access Control is + disabled. The field's value is TRUE if both Data + Guard and Access Control are enabled, and the + Autonomous Database is using primary IP access + control list (ACL) for standby. The field's + value is FALSE if both Data Guard and Access + Control are enabled, and the Autonomous Database + is using a different IP access control list + (ACL) for standby compared to primary. + + This field is a member of `oneof`_ ``_are_primary_allowlisted_ips_used``. + lifecycle_details (str): + Output only. The details of the current + lifestyle state of the Autonomous Database. + state (google.cloud.oracledatabase_v1.types.State): + Output only. The current lifecycle state of + the Autonomous Database. + autonomous_container_database_id (str): + Output only. The Autonomous Container + Database OCID. + available_upgrade_versions (MutableSequence[str]): + Output only. 
The list of available Oracle + Database upgrade versions for an Autonomous + Database. + connection_strings (google.cloud.oracledatabase_v1.types.AutonomousDatabaseConnectionStrings): + Output only. The connection strings used to + connect to an Autonomous Database. + connection_urls (google.cloud.oracledatabase_v1.types.AutonomousDatabaseConnectionUrls): + Output only. The Oracle Connection URLs for + an Autonomous Database. + failed_data_recovery_duration (google.protobuf.duration_pb2.Duration): + Output only. This field indicates the number + of seconds of data loss during a Data Guard + failover. + memory_table_gbs (int): + Output only. The memory assigned to in-memory + tables in an Autonomous Database. + is_local_data_guard_enabled (bool): + Output only. This field indicates whether the + Autonomous Database has local (in-region) Data + Guard enabled. + local_adg_auto_failover_max_data_loss_limit (int): + Output only. This field indicates the maximum + data loss limit for an Autonomous Database, in + seconds. + local_standby_db (google.cloud.oracledatabase_v1.types.AutonomousDatabaseStandbySummary): + Output only. The details of the Autonomous + Data Guard standby database. + memory_per_oracle_compute_unit_gbs (int): + Output only. The amount of memory enabled per + ECPU, in gigabytes. + local_disaster_recovery_type (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.LocalDisasterRecoveryType): + Output only. This field indicates the local + disaster recovery (DR) type of an Autonomous + Database. + data_safe_state (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.DataSafeState): + Output only. The current state of the Data + Safe registration for the Autonomous Database. + database_management_state (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.DatabaseManagementState): + Output only. The current state of database + management for the Autonomous Database. 
+ open_mode (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.OpenMode): + Output only. This field indicates the current + mode of the Autonomous Database. + operations_insights_state (google.cloud.oracledatabase_v1.types.OperationsInsightsState): + Output only. This field indicates the state + of Operations Insights for the Autonomous + Database. + peer_db_ids (MutableSequence[str]): + Output only. The list of OCIDs of standby + databases located in Autonomous Data Guard + remote regions that are associated with the + source database. + permission_level (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.PermissionLevel): + Output only. The permission level of the + Autonomous Database. + private_endpoint (str): + Output only. The private endpoint for the + Autonomous Database. + refreshable_mode (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.RefreshableMode): + Output only. The refresh mode of the cloned + Autonomous Database. + refreshable_state (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.RefreshableState): + Output only. The refresh State of the clone. + role (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.Role): + Output only. The Data Guard role of the + Autonomous Database. + scheduled_operation_details (MutableSequence[google.cloud.oracledatabase_v1.types.ScheduledOperationDetails]): + Output only. The list and details of the + scheduled operations of the Autonomous Database. + sql_web_developer_url (str): + Output only. The SQL Web Developer URL for + the Autonomous Database. + supported_clone_regions (MutableSequence[str]): + Output only. The list of available regions + that can be used to create a clone for the + Autonomous Database. + used_data_storage_size_tbs (int): + Output only. The storage space used by + Autonomous Database, in gigabytes. + oci_url (str): + Output only. The Oracle Cloud Infrastructure + link for the Autonomous Database. 
+ total_auto_backup_storage_size_gbs (float): + Output only. The storage space used by + automatic backups of Autonomous Database, in + gigabytes. + next_long_term_backup_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The long term backup schedule of + the Autonomous Database. + maintenance_begin_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time when + maintenance will begin. + maintenance_end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time when + maintenance will end. + """ + + class DatabaseEdition(proto.Enum): + r"""The editions available for the Autonomous Database. + + Values: + DATABASE_EDITION_UNSPECIFIED (0): + Default unspecified value. + STANDARD_EDITION (1): + Standard Database Edition + ENTERPRISE_EDITION (2): + Enterprise Database Edition + """ + DATABASE_EDITION_UNSPECIFIED = 0 + STANDARD_EDITION = 1 + ENTERPRISE_EDITION = 2 + + class LicenseType(proto.Enum): + r"""The license types available for the Autonomous Database. + + Values: + LICENSE_TYPE_UNSPECIFIED (0): + Unspecified + LICENSE_INCLUDED (1): + License included part of offer + BRING_YOUR_OWN_LICENSE (2): + Bring your own license + """ + LICENSE_TYPE_UNSPECIFIED = 0 + LICENSE_INCLUDED = 1 + BRING_YOUR_OWN_LICENSE = 2 + + class MaintenanceScheduleType(proto.Enum): + r"""The available maintenance schedules for the Autonomous + Database. + + Values: + MAINTENANCE_SCHEDULE_TYPE_UNSPECIFIED (0): + Default unspecified value. + EARLY (1): + An EARLY maintenance schedule patches the + database before the regular scheduled + maintenance. + REGULAR (2): + A REGULAR maintenance schedule follows the + normal maintenance cycle. + """ + MAINTENANCE_SCHEDULE_TYPE_UNSPECIFIED = 0 + EARLY = 1 + REGULAR = 2 + + class LocalDisasterRecoveryType(proto.Enum): + r"""The types of local disaster recovery available for an + Autonomous Database. + + Values: + LOCAL_DISASTER_RECOVERY_TYPE_UNSPECIFIED (0): + Default unspecified value. 
+ ADG (1): + Autonomous Data Guard recovery. + BACKUP_BASED (2): + Backup based recovery. + """ + LOCAL_DISASTER_RECOVERY_TYPE_UNSPECIFIED = 0 + ADG = 1 + BACKUP_BASED = 2 + + class DataSafeState(proto.Enum): + r"""Varies states of the Data Safe registration for the + Autonomous Database. + + Values: + DATA_SAFE_STATE_UNSPECIFIED (0): + Default unspecified value. + REGISTERING (1): + Registering data safe state. + REGISTERED (2): + Registered data safe state. + DEREGISTERING (3): + Deregistering data safe state. + NOT_REGISTERED (4): + Not registered data safe state. + FAILED (5): + Failed data safe state. + """ + DATA_SAFE_STATE_UNSPECIFIED = 0 + REGISTERING = 1 + REGISTERED = 2 + DEREGISTERING = 3 + NOT_REGISTERED = 4 + FAILED = 5 + + class DatabaseManagementState(proto.Enum): + r"""The different states of database management for an Autonomous + Database. + + Values: + DATABASE_MANAGEMENT_STATE_UNSPECIFIED (0): + Default unspecified value. + ENABLING (1): + Enabling Database Management state + ENABLED (2): + Enabled Database Management state + DISABLING (3): + Disabling Database Management state + NOT_ENABLED (4): + Not Enabled Database Management state + FAILED_ENABLING (5): + Failed enabling Database Management state + FAILED_DISABLING (6): + Failed disabling Database Management state + """ + DATABASE_MANAGEMENT_STATE_UNSPECIFIED = 0 + ENABLING = 1 + ENABLED = 2 + DISABLING = 3 + NOT_ENABLED = 4 + FAILED_ENABLING = 5 + FAILED_DISABLING = 6 + + class OpenMode(proto.Enum): + r"""This field indicates the modes of an Autonomous Database. + + Values: + OPEN_MODE_UNSPECIFIED (0): + Default unspecified value. + READ_ONLY (1): + Read Only Mode + READ_WRITE (2): + Read Write Mode + """ + OPEN_MODE_UNSPECIFIED = 0 + READ_ONLY = 1 + READ_WRITE = 2 + + class PermissionLevel(proto.Enum): + r"""The types of permission levels for an Autonomous Database. + + Values: + PERMISSION_LEVEL_UNSPECIFIED (0): + Default unspecified value. 
+ RESTRICTED (1): + Restricted mode allows access only by admin + users. + UNRESTRICTED (2): + Normal access. + """ + PERMISSION_LEVEL_UNSPECIFIED = 0 + RESTRICTED = 1 + UNRESTRICTED = 2 + + class RefreshableMode(proto.Enum): + r"""The refresh mode of the cloned Autonomous Database. + + Values: + REFRESHABLE_MODE_UNSPECIFIED (0): + The default unspecified value. + AUTOMATIC (1): + AUTOMATIC indicates that the cloned database + is automatically refreshed with data from the + source Autonomous Database. + MANUAL (2): + MANUAL indicates that the cloned database is + manually refreshed with data from the source + Autonomous Database. + """ + REFRESHABLE_MODE_UNSPECIFIED = 0 + AUTOMATIC = 1 + MANUAL = 2 + + class RefreshableState(proto.Enum): + r"""The refresh state of the cloned Autonomous Database. + + Values: + REFRESHABLE_STATE_UNSPECIFIED (0): + Default unspecified value. + REFRESHING (1): + Refreshing + NOT_REFRESHING (2): + Not refreshed + """ + REFRESHABLE_STATE_UNSPECIFIED = 0 + REFRESHING = 1 + NOT_REFRESHING = 2 + + class Role(proto.Enum): + r"""The Data Guard role of the Autonomous Database. + + Values: + ROLE_UNSPECIFIED (0): + Default unspecified value. 
+ PRIMARY (1): + Primary role + STANDBY (2): + Standby role + DISABLED_STANDBY (3): + Disabled standby role + BACKUP_COPY (4): + Backup copy role + SNAPSHOT_STANDBY (5): + Snapshot standby role + """ + ROLE_UNSPECIFIED = 0 + PRIMARY = 1 + STANDBY = 2 + DISABLED_STANDBY = 3 + BACKUP_COPY = 4 + SNAPSHOT_STANDBY = 5 + + ocid: str = proto.Field( + proto.STRING, + number=1, + ) + compute_count: float = proto.Field( + proto.FLOAT, + number=2, + ) + cpu_core_count: int = proto.Field( + proto.INT32, + number=3, + ) + data_storage_size_tb: int = proto.Field( + proto.INT32, + number=4, + ) + data_storage_size_gb: int = proto.Field( + proto.INT32, + number=63, + ) + db_workload: "DBWorkload" = proto.Field( + proto.ENUM, + number=5, + enum="DBWorkload", + ) + db_edition: DatabaseEdition = proto.Field( + proto.ENUM, + number=6, + enum=DatabaseEdition, + ) + character_set: str = proto.Field( + proto.STRING, + number=8, + ) + n_character_set: str = proto.Field( + proto.STRING, + number=9, + ) + private_endpoint_ip: str = proto.Field( + proto.STRING, + number=10, + ) + private_endpoint_label: str = proto.Field( + proto.STRING, + number=11, + ) + db_version: str = proto.Field( + proto.STRING, + number=12, + ) + is_auto_scaling_enabled: bool = proto.Field( + proto.BOOL, + number=14, + ) + is_storage_auto_scaling_enabled: bool = proto.Field( + proto.BOOL, + number=15, + ) + license_type: LicenseType = proto.Field( + proto.ENUM, + number=16, + enum=LicenseType, + ) + customer_contacts: MutableSequence[common.CustomerContact] = proto.RepeatedField( + proto.MESSAGE, + number=17, + message=common.CustomerContact, + ) + secret_id: str = proto.Field( + proto.STRING, + number=18, + ) + vault_id: str = proto.Field( + proto.STRING, + number=19, + ) + maintenance_schedule_type: MaintenanceScheduleType = proto.Field( + proto.ENUM, + number=20, + enum=MaintenanceScheduleType, + ) + mtls_connection_required: bool = proto.Field( + proto.BOOL, + number=34, + ) + backup_retention_period_days: int = 
proto.Field( + proto.INT32, + number=57, + ) + actual_used_data_storage_size_tb: float = proto.Field( + proto.DOUBLE, + number=21, + ) + allocated_storage_size_tb: float = proto.Field( + proto.DOUBLE, + number=22, + ) + apex_details: "AutonomousDatabaseApex" = proto.Field( + proto.MESSAGE, + number=23, + message="AutonomousDatabaseApex", + ) + are_primary_allowlisted_ips_used: bool = proto.Field( + proto.BOOL, + number=24, + optional=True, + ) + lifecycle_details: str = proto.Field( + proto.STRING, + number=25, + ) + state: "State" = proto.Field( + proto.ENUM, + number=26, + enum="State", + ) + autonomous_container_database_id: str = proto.Field( + proto.STRING, + number=27, + ) + available_upgrade_versions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=28, + ) + connection_strings: "AutonomousDatabaseConnectionStrings" = proto.Field( + proto.MESSAGE, + number=29, + message="AutonomousDatabaseConnectionStrings", + ) + connection_urls: "AutonomousDatabaseConnectionUrls" = proto.Field( + proto.MESSAGE, + number=30, + message="AutonomousDatabaseConnectionUrls", + ) + failed_data_recovery_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=31, + message=duration_pb2.Duration, + ) + memory_table_gbs: int = proto.Field( + proto.INT32, + number=32, + ) + is_local_data_guard_enabled: bool = proto.Field( + proto.BOOL, + number=33, + ) + local_adg_auto_failover_max_data_loss_limit: int = proto.Field( + proto.INT32, + number=35, + ) + local_standby_db: "AutonomousDatabaseStandbySummary" = proto.Field( + proto.MESSAGE, + number=36, + message="AutonomousDatabaseStandbySummary", + ) + memory_per_oracle_compute_unit_gbs: int = proto.Field( + proto.INT32, + number=37, + ) + local_disaster_recovery_type: LocalDisasterRecoveryType = proto.Field( + proto.ENUM, + number=38, + enum=LocalDisasterRecoveryType, + ) + data_safe_state: DataSafeState = proto.Field( + proto.ENUM, + number=39, + enum=DataSafeState, + ) + database_management_state: 
DatabaseManagementState = proto.Field( + proto.ENUM, + number=40, + enum=DatabaseManagementState, + ) + open_mode: OpenMode = proto.Field( + proto.ENUM, + number=41, + enum=OpenMode, + ) + operations_insights_state: "OperationsInsightsState" = proto.Field( + proto.ENUM, + number=42, + enum="OperationsInsightsState", + ) + peer_db_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=43, + ) + permission_level: PermissionLevel = proto.Field( + proto.ENUM, + number=44, + enum=PermissionLevel, + ) + private_endpoint: str = proto.Field( + proto.STRING, + number=45, + ) + refreshable_mode: RefreshableMode = proto.Field( + proto.ENUM, + number=46, + enum=RefreshableMode, + ) + refreshable_state: RefreshableState = proto.Field( + proto.ENUM, + number=47, + enum=RefreshableState, + ) + role: Role = proto.Field( + proto.ENUM, + number=48, + enum=Role, + ) + scheduled_operation_details: MutableSequence[ + "ScheduledOperationDetails" + ] = proto.RepeatedField( + proto.MESSAGE, + number=64, + message="ScheduledOperationDetails", + ) + sql_web_developer_url: str = proto.Field( + proto.STRING, + number=50, + ) + supported_clone_regions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=51, + ) + used_data_storage_size_tbs: int = proto.Field( + proto.INT32, + number=53, + ) + oci_url: str = proto.Field( + proto.STRING, + number=54, + ) + total_auto_backup_storage_size_gbs: float = proto.Field( + proto.FLOAT, + number=59, + ) + next_long_term_backup_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=60, + message=timestamp_pb2.Timestamp, + ) + maintenance_begin_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=65, + message=timestamp_pb2.Timestamp, + ) + maintenance_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=66, + message=timestamp_pb2.Timestamp, + ) + + +class AutonomousDatabaseApex(proto.Message): + r"""Oracle APEX Application Development. 
+ https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/AutonomousDatabaseApex + + Attributes: + apex_version (str): + Output only. The Oracle APEX Application + Development version. + ords_version (str): + Output only. The Oracle REST Data Services + (ORDS) version. + """ + + apex_version: str = proto.Field( + proto.STRING, + number=1, + ) + ords_version: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AutonomousDatabaseConnectionStrings(proto.Message): + r"""The connection string used to connect to the Autonomous + Database. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/AutonomousDatabaseConnectionStrings + + Attributes: + all_connection_strings (google.cloud.oracledatabase_v1.types.AllConnectionStrings): + Output only. Returns all connection strings + that can be used to connect to the Autonomous + Database. + dedicated (str): + Output only. The database service provides + the least level of resources to each SQL + statement, but supports the most number of + concurrent SQL statements. + high (str): + Output only. The database service provides + the highest level of resources to each SQL + statement. + low (str): + Output only. The database service provides + the least level of resources to each SQL + statement. + medium (str): + Output only. The database service provides a + lower level of resources to each SQL statement. + profiles (MutableSequence[google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile]): + Output only. A list of connection string + profiles to allow clients to group, filter, and + select values based on the structured metadata. 
+ """ + + all_connection_strings: "AllConnectionStrings" = proto.Field( + proto.MESSAGE, + number=1, + message="AllConnectionStrings", + ) + dedicated: str = proto.Field( + proto.STRING, + number=2, + ) + high: str = proto.Field( + proto.STRING, + number=3, + ) + low: str = proto.Field( + proto.STRING, + number=4, + ) + medium: str = proto.Field( + proto.STRING, + number=5, + ) + profiles: MutableSequence["DatabaseConnectionStringProfile"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="DatabaseConnectionStringProfile", + ) + + +class DatabaseConnectionStringProfile(proto.Message): + r"""The connection string profile to allow clients to group. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/DatabaseConnectionStringProfile + + Attributes: + consumer_group (google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile.ConsumerGroup): + Output only. The current consumer group being + used by the connection. + display_name (str): + Output only. The display name for the + database connection. + host_format (google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile.HostFormat): + Output only. The host name format being + currently used in connection string. + is_regional (bool): + Output only. This field indicates if the + connection string is regional and is only + applicable for cross-region Data Guard. + protocol (google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile.Protocol): + Output only. The protocol being used by the + connection. + session_mode (google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile.SessionMode): + Output only. The current session mode of the + connection. + syntax_format (google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile.SyntaxFormat): + Output only. The syntax of the connection + string. + tls_authentication (google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile.TLSAuthentication): + Output only. 
This field indicates the TLS + authentication type of the connection. + value (str): + Output only. The value of the connection + string. + """ + + class ConsumerGroup(proto.Enum): + r"""The various consumer groups available in the connection + string profile. + + Values: + CONSUMER_GROUP_UNSPECIFIED (0): + Default unspecified value. + HIGH (1): + High consumer group. + MEDIUM (2): + Medium consumer group. + LOW (3): + Low consumer group. + TP (4): + TP consumer group. + TPURGENT (5): + TPURGENT consumer group. + """ + CONSUMER_GROUP_UNSPECIFIED = 0 + HIGH = 1 + MEDIUM = 2 + LOW = 3 + TP = 4 + TPURGENT = 5 + + class HostFormat(proto.Enum): + r"""The host name format being used in the connection string. + + Values: + HOST_FORMAT_UNSPECIFIED (0): + Default unspecified value. + FQDN (1): + FQDN + IP (2): + IP + """ + HOST_FORMAT_UNSPECIFIED = 0 + FQDN = 1 + IP = 2 + + class Protocol(proto.Enum): + r"""The protocol being used by the connection. + + Values: + PROTOCOL_UNSPECIFIED (0): + Default unspecified value. + TCP (1): + Tcp + TCPS (2): + Tcps + """ + PROTOCOL_UNSPECIFIED = 0 + TCP = 1 + TCPS = 2 + + class SessionMode(proto.Enum): + r"""The session mode of the connection. + + Values: + SESSION_MODE_UNSPECIFIED (0): + Default unspecified value. + DIRECT (1): + Direct + INDIRECT (2): + Indirect + """ + SESSION_MODE_UNSPECIFIED = 0 + DIRECT = 1 + INDIRECT = 2 + + class SyntaxFormat(proto.Enum): + r"""Specifies syntax of the connection string. + + Values: + SYNTAX_FORMAT_UNSPECIFIED (0): + Default unspecified value. + LONG (1): + Long + EZCONNECT (2): + Ezconnect + EZCONNECTPLUS (3): + Ezconnectplus + """ + SYNTAX_FORMAT_UNSPECIFIED = 0 + LONG = 1 + EZCONNECT = 2 + EZCONNECTPLUS = 3 + + class TLSAuthentication(proto.Enum): + r"""This field indicates the TLS authentication type of the + connection. + + Values: + TLS_AUTHENTICATION_UNSPECIFIED (0): + Default unspecified value. 
+ SERVER (1): + Server + MUTUAL (2): + Mutual + """ + TLS_AUTHENTICATION_UNSPECIFIED = 0 + SERVER = 1 + MUTUAL = 2 + + consumer_group: ConsumerGroup = proto.Field( + proto.ENUM, + number=1, + enum=ConsumerGroup, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + host_format: HostFormat = proto.Field( + proto.ENUM, + number=3, + enum=HostFormat, + ) + is_regional: bool = proto.Field( + proto.BOOL, + number=4, + ) + protocol: Protocol = proto.Field( + proto.ENUM, + number=5, + enum=Protocol, + ) + session_mode: SessionMode = proto.Field( + proto.ENUM, + number=6, + enum=SessionMode, + ) + syntax_format: SyntaxFormat = proto.Field( + proto.ENUM, + number=7, + enum=SyntaxFormat, + ) + tls_authentication: TLSAuthentication = proto.Field( + proto.ENUM, + number=8, + enum=TLSAuthentication, + ) + value: str = proto.Field( + proto.STRING, + number=9, + ) + + +class AllConnectionStrings(proto.Message): + r"""A list of all connection strings that can be used to connect + to the Autonomous Database. + + Attributes: + high (str): + Output only. The database service provides + the highest level of resources to each SQL + statement. + low (str): + Output only. The database service provides + the least level of resources to each SQL + statement. + medium (str): + Output only. The database service provides a + lower level of resources to each SQL statement. + """ + + high: str = proto.Field( + proto.STRING, + number=1, + ) + low: str = proto.Field( + proto.STRING, + number=2, + ) + medium: str = proto.Field( + proto.STRING, + number=3, + ) + + +class AutonomousDatabaseConnectionUrls(proto.Message): + r"""The URLs for accessing Oracle Application Express (APEX) and + SQL Developer Web with a browser from a Compute instance. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/AutonomousDatabaseConnectionUrls + + Attributes: + apex_uri (str): + Output only. Oracle Application Express + (APEX) URL. 
+ database_transforms_uri (str): + Output only. The URL of the Database + Transforms for the Autonomous Database. + graph_studio_uri (str): + Output only. The URL of the Graph Studio for + the Autonomous Database. + machine_learning_notebook_uri (str): + Output only. The URL of the Oracle Machine + Learning (OML) Notebook for the Autonomous + Database. + machine_learning_user_management_uri (str): + Output only. The URL of Machine Learning user + management the Autonomous Database. + mongo_db_uri (str): + Output only. The URL of the MongoDB API for + the Autonomous Database. + ords_uri (str): + Output only. The Oracle REST Data Services + (ORDS) URL of the Web Access for the Autonomous + Database. + sql_dev_web_uri (str): + Output only. The URL of the Oracle SQL + Developer Web for the Autonomous Database. + """ + + apex_uri: str = proto.Field( + proto.STRING, + number=1, + ) + database_transforms_uri: str = proto.Field( + proto.STRING, + number=2, + ) + graph_studio_uri: str = proto.Field( + proto.STRING, + number=3, + ) + machine_learning_notebook_uri: str = proto.Field( + proto.STRING, + number=4, + ) + machine_learning_user_management_uri: str = proto.Field( + proto.STRING, + number=5, + ) + mongo_db_uri: str = proto.Field( + proto.STRING, + number=6, + ) + ords_uri: str = proto.Field( + proto.STRING, + number=7, + ) + sql_dev_web_uri: str = proto.Field( + proto.STRING, + number=8, + ) + + +class AutonomousDatabaseStandbySummary(proto.Message): + r"""Autonomous Data Guard standby database details. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/AutonomousDatabaseStandbySummary + + Attributes: + lag_time_duration (google.protobuf.duration_pb2.Duration): + Output only. The amount of time, in seconds, + that the data of the standby database lags in + comparison to the data of the primary database. + lifecycle_details (str): + Output only. The additional details about the + current lifecycle state of the Autonomous + Database. 
+ state (google.cloud.oracledatabase_v1.types.State): + Output only. The current lifecycle state of + the Autonomous Database. + data_guard_role_changed_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time the Autonomous + Data Guard role was switched for the standby + Autonomous Database. + disaster_recovery_role_changed_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time the Disaster + Recovery role was switched for the standby + Autonomous Database. + """ + + lag_time_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + lifecycle_details: str = proto.Field( + proto.STRING, + number=2, + ) + state: "State" = proto.Field( + proto.ENUM, + number=3, + enum="State", + ) + data_guard_role_changed_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + disaster_recovery_role_changed_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class ScheduledOperationDetails(proto.Message): + r"""Details of scheduled operation. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/ScheduledOperationDetails + + Attributes: + day_of_week (google.type.dayofweek_pb2.DayOfWeek): + Output only. Day of week. + start_time (google.type.timeofday_pb2.TimeOfDay): + Output only. Auto start time. + stop_time (google.type.timeofday_pb2.TimeOfDay): + Output only. Auto stop time. 
+ """ + + day_of_week: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=4, + message=timeofday_pb2.TimeOfDay, + ) + stop_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=5, + message=timeofday_pb2.TimeOfDay, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py new file mode 100644 index 000000000000..dd6bfd509fce --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "AutonomousDatabaseCharacterSet", + }, +) + + +class AutonomousDatabaseCharacterSet(proto.Message): + r"""Details of the Autonomous Database character set resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabaseCharacterSets/ + + Attributes: + name (str): + Identifier. 
The name of the Autonomous Database Character + Set resource in the following format: + projects/{project}/locations/{region}/autonomousDatabaseCharacterSets/{autonomous_database_character_set} + character_set_type (google.cloud.oracledatabase_v1.types.AutonomousDatabaseCharacterSet.CharacterSetType): + Output only. The character set type for the + Autonomous Database. + character_set (str): + Output only. The character set name for the + Autonomous Database which is the ID in the + resource name. + """ + + class CharacterSetType(proto.Enum): + r"""The type of character set an Autonomous Database can have. + + Values: + CHARACTER_SET_TYPE_UNSPECIFIED (0): + Character set type is not specified. + DATABASE (1): + Character set type is set to database. + NATIONAL (2): + Character set type is set to national. + """ + CHARACTER_SET_TYPE_UNSPECIFIED = 0 + DATABASE = 1 + NATIONAL = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + character_set_type: CharacterSetType = proto.Field( + proto.ENUM, + number=2, + enum=CharacterSetType, + ) + character_set: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py new file mode 100644 index 000000000000..1f15eb50e02a --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py @@ -0,0 +1,289 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "AutonomousDatabaseBackup", + "AutonomousDatabaseBackupProperties", + }, +) + + +class AutonomousDatabaseBackup(proto.Message): + r"""Details of the Autonomous Database Backup resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabaseBackup/ + + Attributes: + name (str): + Identifier. The name of the Autonomous Database Backup + resource with the format: + projects/{project}/locations/{region}/autonomousDatabaseBackups/{autonomous_database_backup} + autonomous_database (str): + Required. The name of the Autonomous Database resource for + which the backup is being created. Format: + projects/{project}/locations/{region}/autonomousDatabases/{autonomous_database} + display_name (str): + Optional. User friendly name for the Backup. + The name does not have to be unique. + properties (google.cloud.oracledatabase_v1.types.AutonomousDatabaseBackupProperties): + Optional. Various properties of the backup. + labels (MutableMapping[str, str]): + Optional. labels or tags associated with the + resource. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + autonomous_database: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + properties: "AutonomousDatabaseBackupProperties" = proto.Field( + proto.MESSAGE, + number=4, + message="AutonomousDatabaseBackupProperties", + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + + +class AutonomousDatabaseBackupProperties(proto.Message): + r"""Properties of the Autonomous Database Backup resource. + + Attributes: + ocid (str): + Output only. OCID of the Autonomous Database + backup. + https://docs.oracle.com/en-us/iaas/Content/General/Concepts/identifiers.htm#Oracle + retention_period_days (int): + Optional. Retention period in days for the + backup. + compartment_id (str): + Output only. The OCID of the compartment. + database_size_tb (float): + Output only. The quantity of data in the + database, in terabytes. + db_version (str): + Output only. A valid Oracle Database version + for Autonomous Database. + is_long_term_backup (bool): + Output only. Indicates if the backup is long + term backup. + is_automatic_backup (bool): + Output only. Indicates if the backup is + automatic or user initiated. + is_restorable (bool): + Output only. Indicates if the backup can be + used to restore the Autonomous Database. + key_store_id (str): + Optional. The OCID of the key store of Oracle + Vault. + key_store_wallet (str): + Optional. The wallet name for Oracle Key + Vault. + kms_key_id (str): + Optional. The OCID of the key container that + is used as the master encryption key in database + transparent data encryption (TDE) operations. + kms_key_version_id (str): + Optional. The OCID of the key container + version that is used in database transparent + data encryption (TDE) operations KMS Key can + have multiple key versions. 
If none is + specified, the current key version (latest) of + the Key Id is used for the operation. Autonomous + Database Serverless does not use key versions, + hence is not applicable for Autonomous Database + Serverless instances. + lifecycle_details (str): + Output only. Additional information about the + current lifecycle state. + lifecycle_state (google.cloud.oracledatabase_v1.types.AutonomousDatabaseBackupProperties.State): + Output only. The lifecycle state of the + backup. + size_tb (float): + Output only. The backup size in terabytes. + available_till_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp until when the backup + will be available. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time the backup + completed. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time the backup + started. + type_ (google.cloud.oracledatabase_v1.types.AutonomousDatabaseBackupProperties.Type): + Output only. The type of the backup. + vault_id (str): + Optional. The OCID of the vault. + """ + + class State(proto.Enum): + r"""// The various lifecycle states of the Autonomous Database + Backup. + + Values: + STATE_UNSPECIFIED (0): + Default unspecified value. + CREATING (1): + Indicates that the resource is in creating + state. + ACTIVE (2): + Indicates that the resource is in active + state. + DELETING (3): + Indicates that the resource is in deleting + state. + DELETED (4): + Indicates that the resource is in deleted + state. + FAILED (6): + Indicates that the resource is in failed + state. + UPDATING (7): + Indicates that the resource is in updating + state. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + DELETED = 4 + FAILED = 6 + UPDATING = 7 + + class Type(proto.Enum): + r"""The type of the backup. + + Values: + TYPE_UNSPECIFIED (0): + Default unspecified value. + INCREMENTAL (1): + Incremental backups. + FULL (2): + Full backups. 
+ LONG_TERM (3): + Long term backups. + """ + TYPE_UNSPECIFIED = 0 + INCREMENTAL = 1 + FULL = 2 + LONG_TERM = 3 + + ocid: str = proto.Field( + proto.STRING, + number=1, + ) + retention_period_days: int = proto.Field( + proto.INT32, + number=2, + ) + compartment_id: str = proto.Field( + proto.STRING, + number=3, + ) + database_size_tb: float = proto.Field( + proto.FLOAT, + number=4, + ) + db_version: str = proto.Field( + proto.STRING, + number=5, + ) + is_long_term_backup: bool = proto.Field( + proto.BOOL, + number=6, + ) + is_automatic_backup: bool = proto.Field( + proto.BOOL, + number=7, + ) + is_restorable: bool = proto.Field( + proto.BOOL, + number=8, + ) + key_store_id: str = proto.Field( + proto.STRING, + number=9, + ) + key_store_wallet: str = proto.Field( + proto.STRING, + number=10, + ) + kms_key_id: str = proto.Field( + proto.STRING, + number=11, + ) + kms_key_version_id: str = proto.Field( + proto.STRING, + number=12, + ) + lifecycle_details: str = proto.Field( + proto.STRING, + number=13, + ) + lifecycle_state: State = proto.Field( + proto.ENUM, + number=14, + enum=State, + ) + size_tb: float = proto.Field( + proto.FLOAT, + number=15, + ) + available_till_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=16, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=17, + message=timestamp_pb2.Timestamp, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=18, + message=timestamp_pb2.Timestamp, + ) + type_: Type = proto.Field( + proto.ENUM, + number=19, + enum=Type, + ) + vault_id: str = proto.Field( + proto.STRING, + number=20, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_version.py new file mode 100644 index 000000000000..05189694df98 
--- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_version.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.oracledatabase_v1.types import autonomous_database + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "AutonomousDbVersion", + }, +) + + +class AutonomousDbVersion(proto.Message): + r"""Details of the Autonomous Database version. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDbVersionSummary/ + + Attributes: + name (str): + Identifier. The name of the Autonomous Database Version + resource with the format: + projects/{project}/locations/{region}/autonomousDbVersions/{autonomous_db_version} + version (str): + Output only. An Oracle Database version for + Autonomous Database. + db_workload (google.cloud.oracledatabase_v1.types.DBWorkload): + Output only. The Autonomous Database workload + type. + workload_uri (str): + Output only. A URL that points to a detailed + description of the Autonomous Database version. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + db_workload: autonomous_database.DBWorkload = proto.Field( + proto.ENUM, + number=4, + enum=autonomous_database.DBWorkload, + ) + workload_uri: str = proto.Field( + proto.STRING, + number=5, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/common.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/common.py new file mode 100644 index 000000000000..2357b454221c --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/common.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "CustomerContact", + }, +) + + +class CustomerContact(proto.Message): + r"""The CustomerContact reference as defined by Oracle. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/CustomerContact + + Attributes: + email (str): + Required. The email address used by Oracle to + send notifications regarding databases and + infrastructure. 
+ """ + + email: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_node.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_node.py new file mode 100644 index 000000000000..4f0a7175908f --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_node.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "DbNode", + "DbNodeProperties", + }, +) + + +class DbNode(proto.Message): + r"""Details of the database node resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/DbNode/ + + Attributes: + name (str): + Identifier. The name of the database node resource in the + following format: + projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}/dbNodes/{db_node} + properties (google.cloud.oracledatabase_v1.types.DbNodeProperties): + Optional. Various properties of the database + node. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + properties: "DbNodeProperties" = proto.Field( + proto.MESSAGE, + number=3, + message="DbNodeProperties", + ) + + +class DbNodeProperties(proto.Message): + r"""Various properties and settings associated with Db node. + + Attributes: + ocid (str): + Output only. OCID of database node. + ocpu_count (int): + Optional. OCPU count per database node. + memory_size_gb (int): + Memory allocated in GBs. + db_node_storage_size_gb (int): + Optional. Local storage per database node. + db_server_ocid (str): + Optional. Database server OCID. + hostname (str): + Optional. DNS + state (google.cloud.oracledatabase_v1.types.DbNodeProperties.State): + Output only. State of the database node. + total_cpu_core_count (int): + Total CPU core count of the database node. + """ + + class State(proto.Enum): + r"""The various lifecycle states of the database node. + + Values: + STATE_UNSPECIFIED (0): + Default unspecified value. + PROVISIONING (1): + Indicates that the resource is in + provisioning state. + AVAILABLE (2): + Indicates that the resource is in available + state. + UPDATING (3): + Indicates that the resource is in updating + state. + STOPPING (4): + Indicates that the resource is in stopping + state. + STOPPED (5): + Indicates that the resource is in stopped + state. + STARTING (6): + Indicates that the resource is in starting + state. + TERMINATING (7): + Indicates that the resource is in terminating + state. + TERMINATED (8): + Indicates that the resource is in terminated + state. + FAILED (9): + Indicates that the resource is in failed + state. 
+ """ + STATE_UNSPECIFIED = 0 + PROVISIONING = 1 + AVAILABLE = 2 + UPDATING = 3 + STOPPING = 4 + STOPPED = 5 + STARTING = 6 + TERMINATING = 7 + TERMINATED = 8 + FAILED = 9 + + ocid: str = proto.Field( + proto.STRING, + number=1, + ) + ocpu_count: int = proto.Field( + proto.INT32, + number=2, + ) + memory_size_gb: int = proto.Field( + proto.INT32, + number=3, + ) + db_node_storage_size_gb: int = proto.Field( + proto.INT32, + number=4, + ) + db_server_ocid: str = proto.Field( + proto.STRING, + number=5, + ) + hostname: str = proto.Field( + proto.STRING, + number=8, + ) + state: State = proto.Field( + proto.ENUM, + number=9, + enum=State, + ) + total_cpu_core_count: int = proto.Field( + proto.INT32, + number=10, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_server.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_server.py new file mode 100644 index 000000000000..ac60975560d4 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_server.py @@ -0,0 +1,163 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "DbServer", + "DbServerProperties", + }, +) + + +class DbServer(proto.Message): + r"""Details of the database server resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/DbServer/ + + Attributes: + name (str): + Identifier. The name of the database server resource with + the format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}/dbServers/{db_server} + display_name (str): + Optional. User friendly name for this + resource. + properties (google.cloud.oracledatabase_v1.types.DbServerProperties): + Optional. Various properties of the database + server. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + properties: "DbServerProperties" = proto.Field( + proto.MESSAGE, + number=3, + message="DbServerProperties", + ) + + +class DbServerProperties(proto.Message): + r"""Various properties and settings associated with Exadata + database server. + + Attributes: + ocid (str): + Output only. OCID of database server. + ocpu_count (int): + Optional. OCPU count per database. + max_ocpu_count (int): + Optional. Maximum OCPU count per database. + memory_size_gb (int): + Optional. Memory allocated in GBs. + max_memory_size_gb (int): + Optional. Maximum memory allocated in GBs. + db_node_storage_size_gb (int): + Optional. Local storage per VM. + max_db_node_storage_size_gb (int): + Optional. Maximum local storage per VM. + vm_count (int): + Optional. Vm count per database. + state (google.cloud.oracledatabase_v1.types.DbServerProperties.State): + Output only. State of the database server. + db_node_ids (MutableSequence[str]): + Output only. OCID of database nodes + associated with the database server. 
+ """ + + class State(proto.Enum): + r"""The various lifecycle states of the database server. + + Values: + STATE_UNSPECIFIED (0): + Default unspecified value. + CREATING (1): + Indicates that the resource is in creating + state. + AVAILABLE (2): + Indicates that the resource is in available + state. + UNAVAILABLE (3): + Indicates that the resource is in unavailable + state. + DELETING (4): + Indicates that the resource is in deleting + state. + DELETED (5): + Indicates that the resource is in deleted + state. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + AVAILABLE = 2 + UNAVAILABLE = 3 + DELETING = 4 + DELETED = 5 + + ocid: str = proto.Field( + proto.STRING, + number=1, + ) + ocpu_count: int = proto.Field( + proto.INT32, + number=2, + ) + max_ocpu_count: int = proto.Field( + proto.INT32, + number=3, + ) + memory_size_gb: int = proto.Field( + proto.INT32, + number=4, + ) + max_memory_size_gb: int = proto.Field( + proto.INT32, + number=5, + ) + db_node_storage_size_gb: int = proto.Field( + proto.INT32, + number=6, + ) + max_db_node_storage_size_gb: int = proto.Field( + proto.INT32, + number=7, + ) + vm_count: int = proto.Field( + proto.INT32, + number=8, + ) + state: State = proto.Field( + proto.ENUM, + number=9, + enum=State, + ) + db_node_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=10, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_system_shape.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_system_shape.py new file mode 100644 index 000000000000..7429af46b6cc --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_system_shape.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "DbSystemShape", + }, +) + + +class DbSystemShape(proto.Message): + r"""Details of the Database System Shapes resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/DbSystemShapeSummary/ + + Attributes: + name (str): + Identifier. The name of the Database System Shape resource + with the format: + projects/{project}/locations/{region}/dbSystemShapes/{db_system_shape} + shape (str): + Optional. shape + min_node_count (int): + Optional. Minimum number of database servers. + max_node_count (int): + Optional. Maximum number of database servers. + min_storage_count (int): + Optional. Minimum number of storage servers. + max_storage_count (int): + Optional. Maximum number of storage servers. + available_core_count_per_node (int): + Optional. Number of cores per node. + available_memory_per_node_gb (int): + Optional. Memory per database server node in + gigabytes. + available_data_storage_tb (int): + Optional. Storage per storage server in + terabytes. + min_core_count_per_node (int): + Optional. Minimum core count per node. + min_memory_per_node_gb (int): + Optional. Minimum memory per node in + gigabytes. + min_db_node_storage_per_node_gb (int): + Optional. Minimum node storage per database + server in gigabytes. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + shape: str = proto.Field( + proto.STRING, + number=2, + ) + min_node_count: int = proto.Field( + proto.INT32, + number=3, + ) + max_node_count: int = proto.Field( + proto.INT32, + number=4, + ) + min_storage_count: int = proto.Field( + proto.INT32, + number=5, + ) + max_storage_count: int = proto.Field( + proto.INT32, + number=6, + ) + available_core_count_per_node: int = proto.Field( + proto.INT32, + number=7, + ) + available_memory_per_node_gb: int = proto.Field( + proto.INT32, + number=8, + ) + available_data_storage_tb: int = proto.Field( + proto.INT32, + number=9, + ) + min_core_count_per_node: int = proto.Field( + proto.INT32, + number=10, + ) + min_memory_per_node_gb: int = proto.Field( + proto.INT32, + number=11, + ) + min_db_node_storage_per_node_gb: int = proto.Field( + proto.INT32, + number=12, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/entitlement.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/entitlement.py new file mode 100644 index 000000000000..01b82a412c0b --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/entitlement.py @@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+__protobuf__ = proto.module(
+    package="google.cloud.oracledatabase.v1",
+    manifest={
+        "Entitlement",
+        "CloudAccountDetails",
+    },
+)
+
+
+class Entitlement(proto.Message):
+    r"""Details of the Entitlement resource.
+
+    Attributes:
+        name (str):
+            Identifier. The name of the Entitlement
+            resource with the format:
+            projects/{project}/locations/{region}/entitlements/{entitlement}
+        cloud_account_details (google.cloud.oracledatabase_v1.types.CloudAccountDetails):
+            Details of the OCI Cloud Account.
+        entitlement_id (str):
+            Output only. Google Cloud Marketplace order
+            ID (also known as the entitlement ID).
+        state (google.cloud.oracledatabase_v1.types.Entitlement.State):
+            Output only. Entitlement State.
+    """
+
+    class State(proto.Enum):
+        r"""The various lifecycle states of the subscription.
+
+        Values:
+            STATE_UNSPECIFIED (0):
+                Default unspecified value.
+            ACCOUNT_NOT_LINKED (1):
+                Account not linked.
+            ACCOUNT_NOT_ACTIVE (2):
+                Account is linked but not active.
+            ACTIVE (3):
+                Entitlement and Account are active.
+        """
+        STATE_UNSPECIFIED = 0
+        ACCOUNT_NOT_LINKED = 1
+        ACCOUNT_NOT_ACTIVE = 2
+        ACTIVE = 3
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    cloud_account_details: "CloudAccountDetails" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="CloudAccountDetails",
+    )
+    entitlement_id: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    state: State = proto.Field(
+        proto.ENUM,
+        number=4,
+        enum=State,
+    )
+
+
+class CloudAccountDetails(proto.Message):
+    r"""Details of the OCI Cloud Account.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        cloud_account (str):
+            Output only. OCI account name.
+        cloud_account_home_region (str):
+            Output only. OCI account home region.
+        link_existing_account_uri (str):
+            Output only. URL to link an existing account.
+
+            This field is a member of `oneof`_ ``_link_existing_account_uri``.
+        account_creation_uri (str):
+            Output only. URL to create a new account and
+            link.
+
+            This field is a member of `oneof`_ ``_account_creation_uri``.
+    """
+
+    cloud_account: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    cloud_account_home_region: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    link_existing_account_uri: str = proto.Field(
+        proto.STRING,
+        number=3,
+        optional=True,
+    )
+    account_creation_uri: str = proto.Field(
+        proto.STRING,
+        number=4,
+        optional=True,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/exadata_infra.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/exadata_infra.py
new file mode 100644
index 000000000000..b023bb9f1f34
--- /dev/null
+++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/exadata_infra.py
@@ -0,0 +1,468 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.type import dayofweek_pb2  # type: ignore
+from google.type import month_pb2  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.oracledatabase_v1.types import common
+
+__protobuf__ = proto.module(
+    package="google.cloud.oracledatabase.v1",
+    manifest={
+        "CloudExadataInfrastructure",
+        "CloudExadataInfrastructureProperties",
+        "MaintenanceWindow",
+    },
+)
+
+
+class CloudExadataInfrastructure(proto.Message):
+    r"""Represents CloudExadataInfrastructure resource.
+    https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudExadataInfrastructure/
+
+    Attributes:
+        name (str):
+            Identifier. The name of the Exadata Infrastructure resource
+            with the format:
+            projects/{project}/locations/{region}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}
+        display_name (str):
+            Optional. User friendly name for this
+            resource.
+        gcp_oracle_zone (str):
+            Optional. Google Cloud Platform location
+            where Oracle Exadata is hosted.
+        entitlement_id (str):
+            Output only. Entitlement ID of the private
+            offer against which this infrastructure resource
+            is provisioned.
+        properties (google.cloud.oracledatabase_v1.types.CloudExadataInfrastructureProperties):
+            Optional. Various properties of the infra.
+        labels (MutableMapping[str, str]):
+            Optional. Labels or tags associated with the
+            resource.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The date and time that the
+            Exadata Infrastructure was created.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    display_name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    gcp_oracle_zone: str = proto.Field(
+        proto.STRING,
+        number=8,
+    )
+    entitlement_id: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    properties: "CloudExadataInfrastructureProperties" = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message="CloudExadataInfrastructureProperties",
+    )
+    labels: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=6,
+    )
+    create_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=7,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class CloudExadataInfrastructureProperties(proto.Message):
+    r"""Various properties of Exadata Infrastructure.
+
+    Attributes:
+        ocid (str):
+            Output only. OCID of created infra.
+            https://docs.oracle.com/en-us/iaas/Content/General/Concepts/identifiers.htm#Oracle
+        compute_count (int):
+            Optional. The number of compute servers for
+            the Exadata Infrastructure.
+        storage_count (int):
+            Optional. The number of Cloud Exadata storage
+            servers for the Exadata Infrastructure.
+        total_storage_size_gb (int):
+            Optional. The total storage allocated to the
+            Exadata Infrastructure resource, in gigabytes
+            (GB).
+        available_storage_size_gb (int):
+            Output only. The available storage can be
+            allocated to the Exadata Infrastructure
+            resource, in gigabytes (GB).
+        maintenance_window (google.cloud.oracledatabase_v1.types.MaintenanceWindow):
+            Optional. Maintenance window for repair.
+        state (google.cloud.oracledatabase_v1.types.CloudExadataInfrastructureProperties.State):
+            Output only. The current lifecycle state of
+            the Exadata Infrastructure.
+        shape (str):
+            Required. The shape of the Exadata
+            Infrastructure. The shape determines the amount
+            of CPU, storage, and memory resources allocated
+            to the instance.
+        oci_url (str):
+            Output only. Deep link to the OCI console to
+            view this resource.
+        cpu_count (int):
+            Optional. The number of enabled CPU cores.
+        max_cpu_count (int):
+            Output only. The total number of CPU cores
+            available.
+        memory_size_gb (int):
+            Optional. The memory allocated in GBs.
+        max_memory_gb (int):
+            Output only. The total memory available in
+            GBs.
+        db_node_storage_size_gb (int):
+            Optional. The local node storage allocated in
+            GBs.
+        max_db_node_storage_size_gb (int):
+            Output only. The total local node storage
+            available in GBs.
+        data_storage_size_tb (float):
+            Output only. Size, in terabytes, of the DATA
+            disk group.
+        max_data_storage_tb (float):
+            Output only. The total available DATA disk
+            group size.
+        activated_storage_count (int):
+            Output only. The requested number of
+            additional storage servers activated for the
+            Exadata Infrastructure.
+        additional_storage_count (int):
+            Output only. The requested number of
+            additional storage servers for the Exadata
+            Infrastructure.
+        db_server_version (str):
+            Output only. The software version of the
+            database servers (dom0) in the Exadata
+            Infrastructure.
+        storage_server_version (str):
+            Output only. The software version of the
+            storage servers (cells) in the Exadata
+            Infrastructure.
+        next_maintenance_run_id (str):
+            Output only. The OCID of the next maintenance
+            run.
+        next_maintenance_run_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the next
+            maintenance run will occur.
+        next_security_maintenance_run_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the next security
+            maintenance run will occur.
+        customer_contacts (MutableSequence[google.cloud.oracledatabase_v1.types.CustomerContact]):
+            Optional. The list of customer contacts.
+        monthly_storage_server_version (str):
+            Output only. The monthly software version of
+            the storage servers (cells) in the Exadata
+            Infrastructure. Example: 20.1.15
+        monthly_db_server_version (str):
+            Output only. The monthly software version of
+            the database servers (dom0) in the Exadata
+            Infrastructure. Example: 20.1.15
+    """
+
+    class State(proto.Enum):
+        r"""The various lifecycle states of the Exadata Infrastructure.
+
+        Values:
+            STATE_UNSPECIFIED (0):
+                Default unspecified value.
+            PROVISIONING (1):
+                The Exadata Infrastructure is being
+                provisioned.
+            AVAILABLE (2):
+                The Exadata Infrastructure is available for
+                use.
+            UPDATING (3):
+                The Exadata Infrastructure is being updated.
+            TERMINATING (4):
+                The Exadata Infrastructure is being
+                terminated.
+            TERMINATED (5):
+                The Exadata Infrastructure is terminated.
+            FAILED (6):
+                The Exadata Infrastructure is in failed
+                state.
+            MAINTENANCE_IN_PROGRESS (7):
+                The Exadata Infrastructure is in maintenance.
+        """
+        STATE_UNSPECIFIED = 0
+        PROVISIONING = 1
+        AVAILABLE = 2
+        UPDATING = 3
+        TERMINATING = 4
+        TERMINATED = 5
+        FAILED = 6
+        MAINTENANCE_IN_PROGRESS = 7
+
+    ocid: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    compute_count: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    storage_count: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+    total_storage_size_gb: int = proto.Field(
+        proto.INT32,
+        number=4,
+    )
+    available_storage_size_gb: int = proto.Field(
+        proto.INT32,
+        number=5,
+    )
+    maintenance_window: "MaintenanceWindow" = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message="MaintenanceWindow",
+    )
+    state: State = proto.Field(
+        proto.ENUM,
+        number=7,
+        enum=State,
+    )
+    shape: str = proto.Field(
+        proto.STRING,
+        number=8,
+    )
+    oci_url: str = proto.Field(
+        proto.STRING,
+        number=9,
+    )
+    cpu_count: int = proto.Field(
+        proto.INT32,
+        number=10,
+    )
+    max_cpu_count: int = proto.Field(
+        proto.INT32,
+        number=11,
+    )
+    memory_size_gb: int = proto.Field(
+        proto.INT32,
+        number=12,
+    )
+    max_memory_gb: int = proto.Field(
+        proto.INT32,
+        number=13,
+    )
+    db_node_storage_size_gb: int = proto.Field(
+        proto.INT32,
+        number=14,
+    )
+    max_db_node_storage_size_gb: int = proto.Field(
+        proto.INT32,
+        number=15,
+    )
+    data_storage_size_tb: float = proto.Field(
+        proto.DOUBLE,
+        number=16,
+    )
+    max_data_storage_tb: float = proto.Field(
+        proto.DOUBLE,
+        number=17,
+    )
+    activated_storage_count: int = proto.Field(
+        proto.INT32,
+        number=18,
+    )
+    additional_storage_count: int = proto.Field(
+        proto.INT32,
+        number=19,
+    )
+    db_server_version: str = proto.Field(
+        proto.STRING,
+        number=20,
+    )
+    storage_server_version: str = proto.Field(
+        proto.STRING,
+        number=21,
+    )
+    next_maintenance_run_id: str = proto.Field(
+        proto.STRING,
+        number=22,
+    )
+    next_maintenance_run_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=23,
+        message=timestamp_pb2.Timestamp,
+    )
+    next_security_maintenance_run_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=24,
+        message=timestamp_pb2.Timestamp,
+    )
+    customer_contacts: MutableSequence[common.CustomerContact] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=25,
+        message=common.CustomerContact,
+    )
+    monthly_storage_server_version: str = proto.Field(
+        proto.STRING,
+        number=26,
+    )
+    monthly_db_server_version: str = proto.Field(
+        proto.STRING,
+        number=27,
+    )
+
+
+class MaintenanceWindow(proto.Message):
+    r"""Maintenance window as defined by Oracle.
+    https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/MaintenanceWindow
+
+    Attributes:
+        preference (google.cloud.oracledatabase_v1.types.MaintenanceWindow.MaintenanceWindowPreference):
+            Optional. The maintenance window scheduling
+            preference.
+        months (MutableSequence[google.type.month_pb2.Month]):
+            Optional. Months during the year when
+            maintenance should be performed.
+        weeks_of_month (MutableSequence[int]):
+            Optional. Weeks during the month when
+            maintenance should be performed. Weeks start on
+            the 1st, 8th, 15th, and 22nd days of the month,
+            and have a duration of 7 days. Weeks start and
+            end based on calendar dates, not days of the
+            week.
+        days_of_week (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]):
+            Optional. Days during the week when
+            maintenance should be performed.
+        hours_of_day (MutableSequence[int]):
+            Optional. The window of hours during the day
+            when maintenance should be performed. The window
+            is a 4 hour slot. Valid values are:
+            0 - represents time slot 0:00 - 3:59 UTC
+            4 - represents time slot 4:00 - 7:59 UTC
+            8 - represents time slot 8:00 - 11:59 UTC
+            12 - represents time slot 12:00 - 15:59 UTC
+            16 - represents time slot 16:00 - 19:59 UTC
+            20 - represents time slot 20:00 - 23:59 UTC
+        lead_time_week (int):
+            Optional. Lead time window allows user to set
+            a lead time to prepare for a down time. The lead
+            time is in weeks and valid value is between 1 and
+            4.
+        patching_mode (google.cloud.oracledatabase_v1.types.MaintenanceWindow.PatchingMode):
+            Optional. CloudExadataInfrastructure
+            node patching method, either "ROLLING"
+            or "NONROLLING". Default value is ROLLING.
+        custom_action_timeout_mins (int):
+            Optional. Determines the amount of time the
+            system will wait before the start of each
+            database server patching operation. Custom
+            action timeout is in minutes and valid value is
+            between 15 and 120 (inclusive).
+        is_custom_action_timeout_enabled (bool):
+            Optional. If true, enables the configuration
+            of a custom action timeout (waiting period)
+            between database server patching operations.
+    """
+
+    class MaintenanceWindowPreference(proto.Enum):
+        r"""Maintenance window preference.
+
+        Values:
+            MAINTENANCE_WINDOW_PREFERENCE_UNSPECIFIED (0):
+                Default unspecified value.
+            CUSTOM_PREFERENCE (1):
+                Custom preference.
+            NO_PREFERENCE (2):
+                No preference.
+        """
+        MAINTENANCE_WINDOW_PREFERENCE_UNSPECIFIED = 0
+        CUSTOM_PREFERENCE = 1
+        NO_PREFERENCE = 2
+
+    class PatchingMode(proto.Enum):
+        r"""Patching mode.
+
+        Values:
+            PATCHING_MODE_UNSPECIFIED (0):
+                Default unspecified value.
+            ROLLING (1):
+                Updates the Cloud Exadata database server
+                hosts in a rolling fashion.
+            NON_ROLLING (2):
+                The non-rolling maintenance method first
+                updates your storage servers at the same time,
+                then your database servers at the same time.
+        """
+        PATCHING_MODE_UNSPECIFIED = 0
+        ROLLING = 1
+        NON_ROLLING = 2
+
+    preference: MaintenanceWindowPreference = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum=MaintenanceWindowPreference,
+    )
+    months: MutableSequence[month_pb2.Month] = proto.RepeatedField(
+        proto.ENUM,
+        number=2,
+        enum=month_pb2.Month,
+    )
+    weeks_of_month: MutableSequence[int] = proto.RepeatedField(
+        proto.INT32,
+        number=3,
+    )
+    days_of_week: MutableSequence[dayofweek_pb2.DayOfWeek] = proto.RepeatedField(
+        proto.ENUM,
+        number=4,
+        enum=dayofweek_pb2.DayOfWeek,
+    )
+    hours_of_day: MutableSequence[int] = proto.RepeatedField(
+        proto.INT32,
+        number=5,
+    )
+    lead_time_week: int = proto.Field(
+        proto.INT32,
+        number=6,
+    )
+    patching_mode: PatchingMode = proto.Field(
+        proto.ENUM,
+        number=7,
+        enum=PatchingMode,
+    )
+    custom_action_timeout_mins: int = proto.Field(
+        proto.INT32,
+        number=8,
+    )
+    is_custom_action_timeout_enabled: bool = proto.Field(
+        proto.BOOL,
+        number=9,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/gi_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/gi_version.py
new file mode 100644
index 000000000000..1ecf83198d06
--- /dev/null
+++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/gi_version.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+__protobuf__ = proto.module(
+    package="google.cloud.oracledatabase.v1",
+    manifest={
+        "GiVersion",
+    },
+)
+
+
+class GiVersion(proto.Message):
+    r"""Details of the Oracle Grid Infrastructure (GI) version
+    resource.
+    https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/GiVersionSummary/
+
+    Attributes:
+        name (str):
+            Identifier. The name of the Oracle Grid Infrastructure (GI)
+            version resource with the format:
+            projects/{project}/locations/{region}/giVersions/{gi_versions}
+        version (str):
+            Optional. The GI version string.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    version: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/location_metadata.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/location_metadata.py
new file mode 100644
index 000000000000..f81798592e71
--- /dev/null
+++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/location_metadata.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+__protobuf__ = proto.module(
+    package="google.cloud.oracledatabase.v1",
+    manifest={
+        "LocationMetadata",
+    },
+)
+
+
+class LocationMetadata(proto.Message):
+    r"""Metadata for a given [Location][google.cloud.location.Location].
+
+    Attributes:
+        gcp_oracle_zones (MutableSequence[str]):
+            Output only. The Google Cloud Platform Oracle
+            zones in this location.
+    """
+
+    gcp_oracle_zones: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=2,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/oracledatabase.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/oracledatabase.py
new file mode 100644
index 000000000000..796dbe2203f9
--- /dev/null
+++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/oracledatabase.py
@@ -0,0 +1,1244 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.oracledatabase_v1.types import ( + autonomous_database_character_set, + autonomous_db_backup, + autonomous_db_version, + db_node, + db_server, + db_system_shape, + entitlement, + exadata_infra, + gi_version, + vm_cluster, +) +from google.cloud.oracledatabase_v1.types import ( + autonomous_database as gco_autonomous_database, +) + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "ListCloudExadataInfrastructuresRequest", + "ListCloudExadataInfrastructuresResponse", + "GetCloudExadataInfrastructureRequest", + "CreateCloudExadataInfrastructureRequest", + "DeleteCloudExadataInfrastructureRequest", + "ListCloudVmClustersRequest", + "ListCloudVmClustersResponse", + "GetCloudVmClusterRequest", + "CreateCloudVmClusterRequest", + "DeleteCloudVmClusterRequest", + "ListEntitlementsRequest", + "ListEntitlementsResponse", + "ListDbServersRequest", + "ListDbServersResponse", + "ListDbNodesRequest", + "ListDbNodesResponse", + "ListGiVersionsRequest", + "ListGiVersionsResponse", + "ListDbSystemShapesRequest", + "ListDbSystemShapesResponse", + "OperationMetadata", + "ListAutonomousDatabasesRequest", + "ListAutonomousDatabasesResponse", + "GetAutonomousDatabaseRequest", + "CreateAutonomousDatabaseRequest", + "DeleteAutonomousDatabaseRequest", + "RestoreAutonomousDatabaseRequest", + "GenerateAutonomousDatabaseWalletRequest", + "GenerateAutonomousDatabaseWalletResponse", + "ListAutonomousDbVersionsRequest", + "ListAutonomousDbVersionsResponse", + "ListAutonomousDatabaseCharacterSetsRequest", + "ListAutonomousDatabaseCharacterSetsResponse", + "ListAutonomousDatabaseBackupsRequest", + "ListAutonomousDatabaseBackupsResponse", + }, +) + + +class 
ListCloudExadataInfrastructuresRequest(proto.Message): + r"""The request for ``CloudExadataInfrastructures.List``. + + Attributes: + parent (str): + Required. The parent value for + CloudExadataInfrastructure in the following + format: projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, at most 50 Exadata + infrastructures will be returned. The maximum + value is 1000; values above 1000 will be coerced + to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListCloudExadataInfrastructuresResponse(proto.Message): + r"""The response for ``CloudExadataInfrastructures.list``. + + Attributes: + cloud_exadata_infrastructures (MutableSequence[google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure]): + The list of Exadata Infrastructures. + next_page_token (str): + A token for fetching next page of response. + """ + + @property + def raw_page(self): + return self + + cloud_exadata_infrastructures: MutableSequence[ + exadata_infra.CloudExadataInfrastructure + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=exadata_infra.CloudExadataInfrastructure, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetCloudExadataInfrastructureRequest(proto.Message): + r"""The request for ``CloudExadataInfrastructure.Get``. + + Attributes: + name (str): + Required. The name of the Cloud Exadata Infrastructure in + the following format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateCloudExadataInfrastructureRequest(proto.Message): + r"""The request for ``CloudExadataInfrastructure.Create``. + + Attributes: + parent (str): + Required. The parent value for + CloudExadataInfrastructure in the following + format: projects/{project}/locations/{location}. + cloud_exadata_infrastructure_id (str): + Required. The ID of the Exadata Infrastructure to create. + This value is restricted to + (^`a-z <[a-z0-9-]{0,61}[a-z0-9]>`__?$) and must be a maximum + of 63 characters in length. The value must start with a + letter and end with a letter or a number. + cloud_exadata_infrastructure (google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure): + Required. Details of the Exadata + Infrastructure instance to create. + request_id (str): + Optional. An optional ID to identify the + request. This value is used to identify + duplicate requests. If you make a request with + the same request ID and the original request is + still in progress or completed, the server + ignores the second request. This prevents + clients from accidentally creating duplicate + commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + cloud_exadata_infrastructure_id: str = proto.Field( + proto.STRING, + number=2, + ) + cloud_exadata_infrastructure: exadata_infra.CloudExadataInfrastructure = ( + proto.Field( + proto.MESSAGE, + number=3, + message=exadata_infra.CloudExadataInfrastructure, + ) + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DeleteCloudExadataInfrastructureRequest(proto.Message): + r"""The request for ``CloudExadataInfrastructure.Delete``. + + Attributes: + name (str): + Required. 
The name of the Cloud Exadata Infrastructure in + the following format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}. + request_id (str): + Optional. An optional ID to identify the + request. This value is used to identify + duplicate requests. If you make a request with + the same request ID and the original request is + still in progress or completed, the server + ignores the second request. This prevents + clients from accidentally creating duplicate + commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + force (bool): + Optional. If set to true, all VM clusters for + this Exadata Infrastructure will be deleted. An + Exadata Infrastructure can only be deleted once + all its VM clusters have been deleted. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class ListCloudVmClustersRequest(proto.Message): + r"""The request for ``CloudVmCluster.List``. + + Attributes: + parent (str): + Required. The name of the parent in the + following format: + projects/{project}/locations/{location}. + page_size (int): + Optional. The number of VM clusters to + return. If unspecified, at most 50 VM clusters + will be returned. The maximum value is 1,000. + page_token (str): + Optional. A token identifying the page of + results the server returns. + filter (str): + Optional. An expression for filtering the + results of the request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListCloudVmClustersResponse(proto.Message): + r"""The response for ``CloudVmCluster.List``. 
+ + Attributes: + cloud_vm_clusters (MutableSequence[google.cloud.oracledatabase_v1.types.CloudVmCluster]): + The list of VM Clusters. + next_page_token (str): + A token to fetch the next page of results. + """ + + @property + def raw_page(self): + return self + + cloud_vm_clusters: MutableSequence[vm_cluster.CloudVmCluster] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=vm_cluster.CloudVmCluster, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetCloudVmClusterRequest(proto.Message): + r"""The request for ``CloudVmCluster.Get``. + + Attributes: + name (str): + Required. The name of the Cloud VM Cluster in the following + format: + projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateCloudVmClusterRequest(proto.Message): + r"""The request for ``CloudVmCluster.Create``. + + Attributes: + parent (str): + Required. The name of the parent in the + following format: + projects/{project}/locations/{location}. + cloud_vm_cluster_id (str): + Required. The ID of the VM Cluster to create. This value is + restricted to (^`a-z <[a-z0-9-]{0,61}[a-z0-9]>`__?$) and + must be a maximum of 63 characters in length. The value must + start with a letter and end with a letter or a number. + cloud_vm_cluster (google.cloud.oracledatabase_v1.types.CloudVmCluster): + Required. The resource being created + request_id (str): + Optional. An optional ID to identify the + request. This value is used to identify + duplicate requests. If you make a request with + the same request ID and the original request is + still in progress or completed, the server + ignores the second request. This prevents + clients from accidentally creating duplicate + commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + cloud_vm_cluster_id: str = proto.Field( + proto.STRING, + number=2, + ) + cloud_vm_cluster: vm_cluster.CloudVmCluster = proto.Field( + proto.MESSAGE, + number=3, + message=vm_cluster.CloudVmCluster, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DeleteCloudVmClusterRequest(proto.Message): + r"""The request for ``CloudVmCluster.Delete``. + + Attributes: + name (str): + Required. The name of the Cloud VM Cluster in the following + format: + projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}. + request_id (str): + Optional. An optional ID to identify the + request. This value is used to identify + duplicate requests. If you make a request with + the same request ID and the original request is + still in progress or completed, the server + ignores the second request. This prevents + clients from accidentally creating duplicate + commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + force (bool): + Optional. If set to true, all child resources + for the VM Cluster will be deleted. A VM Cluster + can only be deleted once all its child resources + have been deleted. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class ListEntitlementsRequest(proto.Message): + r"""The request for ``Entitlement.List``. + + Attributes: + parent (str): + Required. The parent value for the + entitlement in the following format: + projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, a maximum of 50 + entitlements will be returned. The maximum value + is 1000. + page_token (str): + Optional. 
A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListEntitlementsResponse(proto.Message): + r"""The response for ``Entitlement.List``. + + Attributes: + entitlements (MutableSequence[google.cloud.oracledatabase_v1.types.Entitlement]): + The list of Entitlements + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + entitlements: MutableSequence[entitlement.Entitlement] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=entitlement.Entitlement, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListDbServersRequest(proto.Message): + r"""The request for ``DbServer.List``. + + Attributes: + parent (str): + Required. The parent value for database + server in the following format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloudExadataInfrastructure}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, a maximum of 50 db + servers will be returned. The maximum value is + 1000; values above 1000 will be reset to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDbServersResponse(proto.Message): + r"""The response for ``DbServer.List``. + + Attributes: + db_servers (MutableSequence[google.cloud.oracledatabase_v1.types.DbServer]): + The list of database servers. + next_page_token (str): + A token identifying a page of results the + server should return. 
+ """ + + @property + def raw_page(self): + return self + + db_servers: MutableSequence[db_server.DbServer] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=db_server.DbServer, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListDbNodesRequest(proto.Message): + r"""The request for ``DbNode.List``. + + Attributes: + parent (str): + Required. The parent value for database node + in the following format: + projects/{project}/locations/{location}/cloudVmClusters/{cloudVmCluster}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, at most 50 db nodes will + be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. A token identifying a page of + results the node should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDbNodesResponse(proto.Message): + r"""The response for ``DbNode.List``. + + Attributes: + db_nodes (MutableSequence[google.cloud.oracledatabase_v1.types.DbNode]): + The list of DB Nodes + next_page_token (str): + A token identifying a page of results the + node should return. + """ + + @property + def raw_page(self): + return self + + db_nodes: MutableSequence[db_node.DbNode] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=db_node.DbNode, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListGiVersionsRequest(proto.Message): + r"""The request for ``GiVersion.List``. + + Attributes: + parent (str): + Required. The parent value for Grid + Infrastructure Version in the following format: + Format: projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, a maximum of 50 Oracle + Grid Infrastructure (GI) versions will be + returned. 
The maximum value is 1000; values + above 1000 will be reset to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListGiVersionsResponse(proto.Message): + r"""The response for ``GiVersion.List``. + + Attributes: + gi_versions (MutableSequence[google.cloud.oracledatabase_v1.types.GiVersion]): + The list of Oracle Grid Infrastructure (GI) + versions. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + gi_versions: MutableSequence[gi_version.GiVersion] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gi_version.GiVersion, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListDbSystemShapesRequest(proto.Message): + r"""The request for ``DbSystemShape.List``. + + Attributes: + parent (str): + Required. The parent value for Database + System Shapes in the following format: + projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, at most 50 database + system shapes will be returned. The maximum + value is 1000; values above 1000 will be coerced + to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDbSystemShapesResponse(proto.Message): + r"""The response for ``DbSystemShape.List``. + + Attributes: + db_system_shapes (MutableSequence[google.cloud.oracledatabase_v1.types.DbSystemShape]): + The list of Database System shapes. 
+ next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + db_system_shapes: MutableSequence[ + db_system_shape.DbSystemShape + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=db_system_shape.DbSystemShape, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. The status of the operation. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have been + cancelled successfully have [Operation.error][] value with a + [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + percent_complete (float): + Output only. An estimated percentage of the + operation that has been completed at a given + moment of time, between 0 and 100. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + percent_complete: float = proto.Field( + proto.DOUBLE, + number=8, + ) + + +class ListAutonomousDatabasesRequest(proto.Message): + r"""The request for ``AutonomousDatabase.List``. + + Attributes: + parent (str): + Required. The parent value for the Autonomous + Database in the following format: + projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, at most 50 Autonomous + Database will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. An expression for filtering the + results of the request. + order_by (str): + Optional. An expression for ordering the + results of the request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListAutonomousDatabasesResponse(proto.Message): + r"""The response for ``AutonomousDatabase.List``. + + Attributes: + autonomous_databases (MutableSequence[google.cloud.oracledatabase_v1.types.AutonomousDatabase]): + The list of Autonomous Databases. 
+ next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + autonomous_databases: MutableSequence[ + gco_autonomous_database.AutonomousDatabase + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gco_autonomous_database.AutonomousDatabase, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetAutonomousDatabaseRequest(proto.Message): + r"""The request for ``AutonomousDatabase.Get``. + + Attributes: + name (str): + Required. The name of the Autonomous Database in the + following format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateAutonomousDatabaseRequest(proto.Message): + r"""The request for ``AutonomousDatabase.Create``. + + Attributes: + parent (str): + Required. The name of the parent in the + following format: + projects/{project}/locations/{location}. + autonomous_database_id (str): + Required. The ID of the Autonomous Database to create. This + value is restricted to + (^`a-z <[a-z0-9-]{0,61}[a-z0-9]>`__?$) and must be a maximum + of 63 characters in length. The value must start with a + letter and end with a letter or a number. + autonomous_database (google.cloud.oracledatabase_v1.types.AutonomousDatabase): + Required. The Autonomous Database being + created. + request_id (str): + Optional. An optional ID to identify the + request. This value is used to identify + duplicate requests. If you make a request with + the same request ID and the original request is + still in progress or completed, the server + ignores the second request. This prevents + clients from accidentally creating duplicate + commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + autonomous_database_id: str = proto.Field( + proto.STRING, + number=2, + ) + autonomous_database: gco_autonomous_database.AutonomousDatabase = proto.Field( + proto.MESSAGE, + number=3, + message=gco_autonomous_database.AutonomousDatabase, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DeleteAutonomousDatabaseRequest(proto.Message): + r"""The request for ``AutonomousDatabase.Delete``. + + Attributes: + name (str): + Required. The name of the resource in the following format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + request_id (str): + Optional. An optional ID to identify the + request. This value is used to identify + duplicate requests. If you make a request with + the same request ID and the original request is + still in progress or completed, the server + ignores the second request. This prevents + clients from accidentally creating duplicate + commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RestoreAutonomousDatabaseRequest(proto.Message): + r"""The request for ``AutonomousDatabase.Restore``. + + Attributes: + name (str): + Required. The name of the Autonomous Database in the + following format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + restore_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The time and date to restore the + database to. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + restore_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class GenerateAutonomousDatabaseWalletRequest(proto.Message): + r"""The request for ``AutonomousDatabase.GenerateWallet``. + + Attributes: + name (str): + Required. The name of the Autonomous Database in the + following format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + type_ (google.cloud.oracledatabase_v1.types.GenerateType): + Optional. The type of wallet generation for + the Autonomous Database. The default value is + SINGLE. + is_regional (bool): + Optional. True when requesting regional + connection strings in PDB connect info, + applicable to cross-region Data Guard only. + password (str): + Required. The password used to encrypt the + keys inside the wallet. The password must be a + minimum of 8 characters. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: gco_autonomous_database.GenerateType = proto.Field( + proto.ENUM, + number=2, + enum=gco_autonomous_database.GenerateType, + ) + is_regional: bool = proto.Field( + proto.BOOL, + number=3, + ) + password: str = proto.Field( + proto.STRING, + number=4, + ) + + +class GenerateAutonomousDatabaseWalletResponse(proto.Message): + r"""The response for ``AutonomousDatabase.GenerateWallet``. + + Attributes: + archive_content (bytes): + Output only. The base64 encoded wallet files. + """ + + archive_content: bytes = proto.Field( + proto.BYTES, + number=1, + ) + + +class ListAutonomousDbVersionsRequest(proto.Message): + r"""The request for ``AutonomousDbVersion.List``. + + Attributes: + parent (str): + Required. The parent value for the Autonomous + Database in the following format: + projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. 
If unspecified, at most 50 Autonomous DB + Versions will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAutonomousDbVersionsResponse(proto.Message): + r"""The response for ``AutonomousDbVersion.List``. + + Attributes: + autonomous_db_versions (MutableSequence[google.cloud.oracledatabase_v1.types.AutonomousDbVersion]): + The list of Autonomous Database versions. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + autonomous_db_versions: MutableSequence[ + autonomous_db_version.AutonomousDbVersion + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=autonomous_db_version.AutonomousDbVersion, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListAutonomousDatabaseCharacterSetsRequest(proto.Message): + r"""The request for ``AutonomousDatabaseCharacterSet.List``. + + Attributes: + parent (str): + Required. The parent value for the Autonomous + Database in the following format: + projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, at most 50 Autonomous DB + Character Sets will be returned. The maximum + value is 1000; values above 1000 will be coerced + to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. An expression for filtering the results of the + request. Only the **character_set_type** field is supported + in the following format: + ``character_set_type="{characterSetType}"``. 
Accepted values + include ``DATABASE`` and ``NATIONAL``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListAutonomousDatabaseCharacterSetsResponse(proto.Message): + r"""The response for ``AutonomousDatabaseCharacterSet.List``. + + Attributes: + autonomous_database_character_sets (MutableSequence[google.cloud.oracledatabase_v1.types.AutonomousDatabaseCharacterSet]): + The list of Autonomous Database Character + Sets. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + autonomous_database_character_sets: MutableSequence[ + autonomous_database_character_set.AutonomousDatabaseCharacterSet + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=autonomous_database_character_set.AutonomousDatabaseCharacterSet, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListAutonomousDatabaseBackupsRequest(proto.Message): + r"""The request for ``AutonomousDatabaseBackup.List``. + + Attributes: + parent (str): + Required. The parent value for + ListAutonomousDatabaseBackups in the following + format: projects/{project}/locations/{location}. + filter (str): + Optional. An expression for filtering the results of the + request. Only the **autonomous_database_id** field is + supported in the following format: + ``autonomous_database_id="{autonomous_database_id}"``. The + accepted values must be a valid Autonomous Database ID, + limited to the naming restrictions of the ID: + ^\ `a-z <[a-z0-9-]{0,61}[a-z0-9]>`__?$). The ID must start + with a letter, end with a letter or a number, and be a + maximum of 63 characters. + page_size (int): + Optional. The maximum number of items to + return. 
If unspecified, at most 50 Autonomous DB + Backups will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListAutonomousDatabaseBackupsResponse(proto.Message): + r"""The response for ``AutonomousDatabaseBackup.List``. + + Attributes: + autonomous_database_backups (MutableSequence[google.cloud.oracledatabase_v1.types.AutonomousDatabaseBackup]): + The list of Autonomous Database Backups. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + autonomous_database_backups: MutableSequence[ + autonomous_db_backup.AutonomousDatabaseBackup + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=autonomous_db_backup.AutonomousDatabaseBackup, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/vm_cluster.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/vm_cluster.py new file mode 100644 index 000000000000..44104d291bd3 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/vm_cluster.py @@ -0,0 +1,437 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import datetime_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "CloudVmCluster", + "CloudVmClusterProperties", + "DataCollectionOptions", + }, +) + + +class CloudVmCluster(proto.Message): + r"""Details of the Cloud VM Cluster resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudVmCluster/ + + Attributes: + name (str): + Identifier. The name of the VM Cluster resource with the + format: + projects/{project}/locations/{region}/cloudVmClusters/{cloud_vm_cluster} + exadata_infrastructure (str): + Required. The name of the Exadata Infrastructure resource on + which VM cluster resource is created, in the following + format: + projects/{project}/locations/{region}/cloudExadataInfrastuctures/{cloud_extradata_infrastructure} + display_name (str): + Optional. User friendly name for this + resource. + gcp_oracle_zone (str): + Output only. Google Cloud Platform location + where Oracle Exadata is hosted. It is same as + Google Cloud Platform Oracle zone of Exadata + infrastructure. + properties (google.cloud.oracledatabase_v1.types.CloudVmClusterProperties): + Optional. Various properties of the VM + Cluster. + labels (MutableMapping[str, str]): + Optional. Labels or tags associated with the + VM Cluster. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
The date and time that the VM + cluster was created. + cidr (str): + Required. Network settings. CIDR to use for + cluster IP allocation. + backup_subnet_cidr (str): + Required. CIDR range of the backup subnet. + network (str): + Required. The name of the VPC network. + Format: + projects/{project}/global/networks/{network} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + exadata_infrastructure: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + gcp_oracle_zone: str = proto.Field( + proto.STRING, + number=12, + ) + properties: "CloudVmClusterProperties" = proto.Field( + proto.MESSAGE, + number=6, + message="CloudVmClusterProperties", + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + cidr: str = proto.Field( + proto.STRING, + number=9, + ) + backup_subnet_cidr: str = proto.Field( + proto.STRING, + number=10, + ) + network: str = proto.Field( + proto.STRING, + number=11, + ) + + +class CloudVmClusterProperties(proto.Message): + r"""Various properties and settings associated with Exadata VM + cluster. + + Attributes: + ocid (str): + Output only. Oracle Cloud Infrastructure ID + of VM Cluster. + license_type (google.cloud.oracledatabase_v1.types.CloudVmClusterProperties.LicenseType): + Required. License type of VM Cluster. + gi_version (str): + Optional. Grid Infrastructure Version. + time_zone (google.type.datetime_pb2.TimeZone): + Optional. Time zone of VM Cluster to set. + Defaults to UTC if not specified. + ssh_public_keys (MutableSequence[str]): + Optional. SSH public keys to be stored with + cluster. + node_count (int): + Optional. Number of database servers. + shape (str): + Output only. Shape of VM Cluster. + ocpu_count (float): + Optional. OCPU count per VM. Minimum is 0.1. 
+ memory_size_gb (int): + Optional. Memory allocated in GBs. + db_node_storage_size_gb (int): + Optional. Local storage per VM. + storage_size_gb (int): + Output only. The storage allocation for the + disk group, in gigabytes (GB). + data_storage_size_tb (float): + Optional. The data disk group size to be + allocated in TBs. + disk_redundancy (google.cloud.oracledatabase_v1.types.CloudVmClusterProperties.DiskRedundancy): + Optional. The type of redundancy. + sparse_diskgroup_enabled (bool): + Optional. Use exadata sparse snapshots. + local_backup_enabled (bool): + Optional. Use local backup. + hostname_prefix (str): + Optional. Prefix for VM cluster host names. + diagnostics_data_collection_options (google.cloud.oracledatabase_v1.types.DataCollectionOptions): + Optional. Data collection options for + diagnostics. + state (google.cloud.oracledatabase_v1.types.CloudVmClusterProperties.State): + Output only. State of the cluster. + scan_listener_port_tcp (int): + Output only. SCAN listener port - TCP + scan_listener_port_tcp_ssl (int): + Output only. SCAN listener port - TLS + domain (str): + Output only. Parent DNS domain where SCAN DNS + and hosts names are qualified. ex: + ocispdelegated.ocisp10jvnet.oraclevcn.com + scan_dns (str): + Output only. SCAN DNS name. + ex: + sp2-yi0xq-scan.ocispdelegated.ocisp10jvnet.oraclevcn.com + hostname (str): + Output only. host name without domain. format: + "-" with some suffix. ex: sp2-yi0xq where + "sp2" is the hostname_prefix. + cpu_core_count (int): + Required. Number of enabled CPU cores. + system_version (str): + Output only. Operating system version of the + image. + scan_ip_ids (MutableSequence[str]): + Output only. OCIDs of scan IPs. + scan_dns_record_id (str): + Output only. OCID of scan DNS record. + oci_url (str): + Output only. Deep link to the OCI console to + view this resource. + db_server_ocids (MutableSequence[str]): + Optional. OCID of database servers. + compartment_id (str): + Output only. 
Compartment ID of cluster. + dns_listener_ip (str): + Output only. DNS listener IP. + cluster_name (str): + Optional. OCI Cluster name. + """ + + class LicenseType(proto.Enum): + r"""Different licenses supported. + + Values: + LICENSE_TYPE_UNSPECIFIED (0): + Unspecified + LICENSE_INCLUDED (1): + License included part of offer + BRING_YOUR_OWN_LICENSE (2): + Bring your own license + """ + LICENSE_TYPE_UNSPECIFIED = 0 + LICENSE_INCLUDED = 1 + BRING_YOUR_OWN_LICENSE = 2 + + class DiskRedundancy(proto.Enum): + r"""Types of disk redundancy provided by Oracle. + + Values: + DISK_REDUNDANCY_UNSPECIFIED (0): + Unspecified. + HIGH (1): + High - 3 way mirror. + NORMAL (2): + Normal - 2 way mirror. + """ + DISK_REDUNDANCY_UNSPECIFIED = 0 + HIGH = 1 + NORMAL = 2 + + class State(proto.Enum): + r"""The various lifecycle states of the VM cluster. + + Values: + STATE_UNSPECIFIED (0): + Default unspecified value. + PROVISIONING (1): + Indicates that the resource is in + provisioning state. + AVAILABLE (2): + Indicates that the resource is in available + state. + UPDATING (3): + Indicates that the resource is in updating + state. + TERMINATING (4): + Indicates that the resource is in terminating + state. + TERMINATED (5): + Indicates that the resource is in terminated + state. + FAILED (6): + Indicates that the resource is in failed + state. + MAINTENANCE_IN_PROGRESS (7): + Indicates that the resource is in maintenance + in progress state. 
+ """ + STATE_UNSPECIFIED = 0 + PROVISIONING = 1 + AVAILABLE = 2 + UPDATING = 3 + TERMINATING = 4 + TERMINATED = 5 + FAILED = 6 + MAINTENANCE_IN_PROGRESS = 7 + + ocid: str = proto.Field( + proto.STRING, + number=1, + ) + license_type: LicenseType = proto.Field( + proto.ENUM, + number=2, + enum=LicenseType, + ) + gi_version: str = proto.Field( + proto.STRING, + number=3, + ) + time_zone: datetime_pb2.TimeZone = proto.Field( + proto.MESSAGE, + number=4, + message=datetime_pb2.TimeZone, + ) + ssh_public_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + node_count: int = proto.Field( + proto.INT32, + number=6, + ) + shape: str = proto.Field( + proto.STRING, + number=7, + ) + ocpu_count: float = proto.Field( + proto.FLOAT, + number=8, + ) + memory_size_gb: int = proto.Field( + proto.INT32, + number=9, + ) + db_node_storage_size_gb: int = proto.Field( + proto.INT32, + number=10, + ) + storage_size_gb: int = proto.Field( + proto.INT32, + number=11, + ) + data_storage_size_tb: float = proto.Field( + proto.DOUBLE, + number=12, + ) + disk_redundancy: DiskRedundancy = proto.Field( + proto.ENUM, + number=13, + enum=DiskRedundancy, + ) + sparse_diskgroup_enabled: bool = proto.Field( + proto.BOOL, + number=14, + ) + local_backup_enabled: bool = proto.Field( + proto.BOOL, + number=15, + ) + hostname_prefix: str = proto.Field( + proto.STRING, + number=16, + ) + diagnostics_data_collection_options: "DataCollectionOptions" = proto.Field( + proto.MESSAGE, + number=19, + message="DataCollectionOptions", + ) + state: State = proto.Field( + proto.ENUM, + number=20, + enum=State, + ) + scan_listener_port_tcp: int = proto.Field( + proto.INT32, + number=21, + ) + scan_listener_port_tcp_ssl: int = proto.Field( + proto.INT32, + number=22, + ) + domain: str = proto.Field( + proto.STRING, + number=23, + ) + scan_dns: str = proto.Field( + proto.STRING, + number=24, + ) + hostname: str = proto.Field( + proto.STRING, + number=25, + ) + cpu_core_count: int = 
proto.Field( + proto.INT32, + number=26, + ) + system_version: str = proto.Field( + proto.STRING, + number=27, + ) + scan_ip_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=28, + ) + scan_dns_record_id: str = proto.Field( + proto.STRING, + number=29, + ) + oci_url: str = proto.Field( + proto.STRING, + number=30, + ) + db_server_ocids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=31, + ) + compartment_id: str = proto.Field( + proto.STRING, + number=32, + ) + dns_listener_ip: str = proto.Field( + proto.STRING, + number=35, + ) + cluster_name: str = proto.Field( + proto.STRING, + number=36, + ) + + +class DataCollectionOptions(proto.Message): + r"""Data collection options for diagnostics. + + Attributes: + diagnostics_events_enabled (bool): + Optional. Indicates whether diagnostic + collection is enabled for the VM cluster + health_monitoring_enabled (bool): + Optional. Indicates whether health monitoring + is enabled for the VM cluster + incident_logs_enabled (bool): + Optional. 
Indicates whether incident logs and + trace collection are enabled for the VM cluster + """ + + diagnostics_events_enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + health_monitoring_enabled: bool = proto.Field( + proto.BOOL, + number=2, + ) + incident_logs_enabled: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/mypy.ini b/packages/google-cloud-oracledatabase/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-oracledatabase/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-oracledatabase/noxfile.py b/packages/google-cloud-oracledatabase/noxfile.py new file mode 100644 index 000000000000..aeee7851401a --- /dev/null +++ b/packages/google-cloud-oracledatabase/noxfile.py @@ -0,0 +1,452 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. 
Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. 
+ + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+ with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "grpc-google-iam-v1", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py new file mode 100644 index 000000000000..8812689ffc11 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAutonomousDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_CreateAutonomousDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_create_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + autonomous_database = oracledatabase_v1.AutonomousDatabase() + autonomous_database.network = "network_value" + autonomous_database.cidr = "cidr_value" + + request = oracledatabase_v1.CreateAutonomousDatabaseRequest( + parent="parent_value", + autonomous_database_id="autonomous_database_id_value", + autonomous_database=autonomous_database, + ) + + # Make the request + operation = client.create_autonomous_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_CreateAutonomousDatabase_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py new file mode 100644 index 000000000000..298e7b566d84 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCloudExadataInfrastructure +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_CreateCloudExadataInfrastructure_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_create_cloud_exadata_infrastructure(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.CreateCloudExadataInfrastructureRequest( + parent="parent_value", + cloud_exadata_infrastructure_id="cloud_exadata_infrastructure_id_value", + ) + + # Make the request + operation = client.create_cloud_exadata_infrastructure(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_CreateCloudExadataInfrastructure_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py new file mode 100644 index 000000000000..4f172f9ce515 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCloudVmCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_CreateCloudVmCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_create_cloud_vm_cluster(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + cloud_vm_cluster = oracledatabase_v1.CloudVmCluster() + cloud_vm_cluster.exadata_infrastructure = "exadata_infrastructure_value" + cloud_vm_cluster.cidr = "cidr_value" + cloud_vm_cluster.backup_subnet_cidr = "backup_subnet_cidr_value" + cloud_vm_cluster.network = "network_value" + + request = oracledatabase_v1.CreateCloudVmClusterRequest( + parent="parent_value", + cloud_vm_cluster_id="cloud_vm_cluster_id_value", + cloud_vm_cluster=cloud_vm_cluster, + ) + + # Make the request + operation = client.create_cloud_vm_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_CreateCloudVmCluster_sync] diff --git 
a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py new file mode 100644 index 000000000000..44f792f2869d --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAutonomousDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_DeleteAutonomousDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_delete_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.DeleteAutonomousDatabaseRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_autonomous_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_DeleteAutonomousDatabase_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py new file mode 100644 index 000000000000..bbbc8441a482 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteCloudExadataInfrastructure +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_DeleteCloudExadataInfrastructure_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_delete_cloud_exadata_infrastructure(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.DeleteCloudExadataInfrastructureRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cloud_exadata_infrastructure(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_DeleteCloudExadataInfrastructure_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py new file mode 100644 index 000000000000..42bdd4641c93 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 
2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCloudVmCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_DeleteCloudVmCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_delete_cloud_vm_cluster(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.DeleteCloudVmClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cloud_vm_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_DeleteCloudVmCluster_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py new file mode 100644 index 000000000000..3f2cdc2849ae --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GenerateAutonomousDatabaseWallet +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_GenerateAutonomousDatabaseWallet_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_generate_autonomous_database_wallet(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GenerateAutonomousDatabaseWalletRequest( + name="name_value", + password="password_value", + ) + + # Make the request + response = client.generate_autonomous_database_wallet(request=request) + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_GenerateAutonomousDatabaseWallet_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py new file mode 100644 index 000000000000..6273b25ca4e7 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAutonomousDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_GetAutonomousDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_get_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GetAutonomousDatabaseRequest( + name="name_value", + ) + + # Make the request + response = client.get_autonomous_database(request=request) + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_GetAutonomousDatabase_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py new file mode 100644 index 000000000000..abbfae94a61b --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCloudExadataInfrastructure +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_GetCloudExadataInfrastructure_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_get_cloud_exadata_infrastructure(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GetCloudExadataInfrastructureRequest( + name="name_value", + ) + + # Make the request + response = client.get_cloud_exadata_infrastructure(request=request) + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_GetCloudExadataInfrastructure_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py new file mode 100644 index 000000000000..76e6c851c122 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCloudVmCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_GetCloudVmCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_get_cloud_vm_cluster(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GetCloudVmClusterRequest( + name="name_value", + ) + + # Make the request + response = client.get_cloud_vm_cluster(request=request) + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_GetCloudVmCluster_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py new file mode 100644 index 000000000000..217be56f9f1e --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAutonomousDatabaseBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabaseBackups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_autonomous_database_backups(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDatabaseBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_database_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabaseBackups_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py new file mode 100644 index 000000000000..d33d179e389a --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAutonomousDatabaseCharacterSets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabaseCharacterSets_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_autonomous_database_character_sets(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDatabaseCharacterSetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_database_character_sets(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabaseCharacterSets_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py new file mode 100644 index 000000000000..4153a3ad56f5 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListAutonomousDatabases +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabases_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_autonomous_databases(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_databases(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabases_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py new file mode 100644 index 000000000000..3ffeb9c12ee1 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAutonomousDbVersions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListAutonomousDbVersions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_autonomous_db_versions(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDbVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_db_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListAutonomousDbVersions_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py new file mode 100644 index 000000000000..9fa96f7b3216 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListCloudExadataInfrastructures +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListCloudExadataInfrastructures_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_cloud_exadata_infrastructures(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListCloudExadataInfrastructuresRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_cloud_exadata_infrastructures(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListCloudExadataInfrastructures_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py new file mode 100644 index 000000000000..4d768bee445f --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCloudVmClusters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListCloudVmClusters_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_cloud_vm_clusters(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListCloudVmClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_cloud_vm_clusters(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListCloudVmClusters_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py new file mode 100644 index 000000000000..5aeb00470993 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDbNodes +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListDbNodes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_db_nodes(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListDbNodesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_db_nodes(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListDbNodes_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py new file mode 100644 index 000000000000..7daf125eb879 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDbServers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListDbServers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_db_servers(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListDbServersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_db_servers(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListDbServers_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py new file mode 100644 index 000000000000..851a38768000 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDbSystemShapes +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListDbSystemShapes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_db_system_shapes(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListDbSystemShapesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_db_system_shapes(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListDbSystemShapes_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py new file mode 100644 index 000000000000..cf7ff1ebdfea --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEntitlements +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListEntitlements_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_entitlements(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListEntitlementsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entitlements(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListEntitlements_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py new file mode 100644 index 000000000000..23cd229e7552 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGiVersions +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListGiVersions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_gi_versions(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListGiVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_gi_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListGiVersions_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py new file mode 100644 index 000000000000..d0e6cf445d21 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreAutonomousDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_RestoreAutonomousDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_restore_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.RestoreAutonomousDatabaseRequest( + name="name_value", + ) + + # Make the request + operation = client.restore_autonomous_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_RestoreAutonomousDatabase_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json b/packages/google-cloud-oracledatabase/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json new file mode 100644 index 000000000000..f8cf3fba2f4a --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json @@ -0,0 +1,1815 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.oracledatabase.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-oracledatabase", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.create_autonomous_database", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.CreateAutonomousDatabase", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "CreateAutonomousDatabase" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.oracledatabase_v1.types.CreateAutonomousDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "autonomous_database", + "type": "google.cloud.oracledatabase_v1.types.AutonomousDatabase" + }, + { + "name": "autonomous_database_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_autonomous_database" + }, + "description": "Sample for CreateAutonomousDatabase", + "file": "oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_CreateAutonomousDatabase_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.create_cloud_exadata_infrastructure", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.CreateCloudExadataInfrastructure", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "CreateCloudExadataInfrastructure" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.oracledatabase_v1.types.CreateCloudExadataInfrastructureRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "cloud_exadata_infrastructure", + "type": "google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure" + }, + { + "name": "cloud_exadata_infrastructure_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_cloud_exadata_infrastructure" + }, + "description": "Sample for CreateCloudExadataInfrastructure", + "file": "oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_CreateCloudExadataInfrastructure_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.create_cloud_vm_cluster", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.CreateCloudVmCluster", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "CreateCloudVmCluster" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.CreateCloudVmClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "cloud_vm_cluster", + "type": "google.cloud.oracledatabase_v1.types.CloudVmCluster" + }, + { + "name": "cloud_vm_cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_cloud_vm_cluster" + }, + "description": "Sample for CreateCloudVmCluster", + "file": "oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_CreateCloudVmCluster_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.delete_autonomous_database", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.DeleteAutonomousDatabase", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "DeleteAutonomousDatabase" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.oracledatabase_v1.types.DeleteAutonomousDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_autonomous_database" + }, + "description": "Sample for DeleteAutonomousDatabase", + "file": "oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_DeleteAutonomousDatabase_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.delete_cloud_exadata_infrastructure", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.DeleteCloudExadataInfrastructure", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "DeleteCloudExadataInfrastructure" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.DeleteCloudExadataInfrastructureRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_cloud_exadata_infrastructure" + }, + "description": "Sample for DeleteCloudExadataInfrastructure", + "file": "oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_DeleteCloudExadataInfrastructure_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.delete_cloud_vm_cluster", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.DeleteCloudVmCluster", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "DeleteCloudVmCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.DeleteCloudVmClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation.Operation", + "shortName": "delete_cloud_vm_cluster" + }, + "description": "Sample for DeleteCloudVmCluster", + "file": "oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_DeleteCloudVmCluster_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.generate_autonomous_database_wallet", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.GenerateAutonomousDatabaseWallet", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "GenerateAutonomousDatabaseWallet" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.GenerateAutonomousDatabaseWalletRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "type_", + "type": "google.cloud.oracledatabase_v1.types.GenerateType" + }, + { + "name": "is_regional", + "type": "bool" + }, + { + "name": "password", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.oracledatabase_v1.types.GenerateAutonomousDatabaseWalletResponse", + "shortName": "generate_autonomous_database_wallet" + }, + "description": "Sample for GenerateAutonomousDatabaseWallet", + "file": "oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_GenerateAutonomousDatabaseWallet_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.get_autonomous_database", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.GetAutonomousDatabase", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "GetAutonomousDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.GetAutonomousDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.types.AutonomousDatabase", + "shortName": "get_autonomous_database" + }, + "description": "Sample for 
GetAutonomousDatabase", + "file": "oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_GetAutonomousDatabase_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.get_cloud_exadata_infrastructure", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.GetCloudExadataInfrastructure", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "GetCloudExadataInfrastructure" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.GetCloudExadataInfrastructureRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure", + "shortName": "get_cloud_exadata_infrastructure" + }, + "description": "Sample for GetCloudExadataInfrastructure", + "file": "oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_GetCloudExadataInfrastructure_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.get_cloud_vm_cluster", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.GetCloudVmCluster", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "GetCloudVmCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.GetCloudVmClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.types.CloudVmCluster", + "shortName": "get_cloud_vm_cluster" + }, + "description": "Sample for GetCloudVmCluster", + "file": "oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_GetCloudVmCluster_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, 
+ { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_autonomous_database_backups", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListAutonomousDatabaseBackups", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListAutonomousDatabaseBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDatabaseBackupsPager", + "shortName": "list_autonomous_database_backups" + }, + "description": "Sample for ListAutonomousDatabaseBackups", + "file": "oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabaseBackups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_autonomous_database_character_sets", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListAutonomousDatabaseCharacterSets", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListAutonomousDatabaseCharacterSets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseCharacterSetsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDatabaseCharacterSetsPager", + "shortName": "list_autonomous_database_character_sets" + }, + "description": "Sample for ListAutonomousDatabaseCharacterSets", + "file": "oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabaseCharacterSets_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, 
+ { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_autonomous_databases", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListAutonomousDatabases", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListAutonomousDatabases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListAutonomousDatabasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDatabasesPager", + "shortName": "list_autonomous_databases" + }, + "description": "Sample for ListAutonomousDatabases", + "file": "oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabases_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_autonomous_db_versions", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListAutonomousDbVersions", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListAutonomousDbVersions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListAutonomousDbVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDbVersionsPager", + "shortName": "list_autonomous_db_versions" + }, + "description": "Sample for ListAutonomousDbVersions", + "file": "oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListAutonomousDbVersions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + 
"client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_cloud_exadata_infrastructures", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListCloudExadataInfrastructures", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListCloudExadataInfrastructures" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListCloudExadataInfrastructuresRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListCloudExadataInfrastructuresPager", + "shortName": "list_cloud_exadata_infrastructures" + }, + "description": "Sample for ListCloudExadataInfrastructures", + "file": "oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListCloudExadataInfrastructures_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_cloud_vm_clusters", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListCloudVmClusters", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListCloudVmClusters" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListCloudVmClustersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListCloudVmClustersPager", + "shortName": "list_cloud_vm_clusters" + }, + "description": "Sample for ListCloudVmClusters", + "file": "oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListCloudVmClusters_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_db_nodes", + "method": { 
+ "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListDbNodes", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListDbNodes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListDbNodesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListDbNodesPager", + "shortName": "list_db_nodes" + }, + "description": "Sample for ListDbNodes", + "file": "oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListDbNodes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_db_servers", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListDbServers", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListDbServers" + }, + "parameters": [ + { + "name": "request", + 
"type": "google.cloud.oracledatabase_v1.types.ListDbServersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListDbServersPager", + "shortName": "list_db_servers" + }, + "description": "Sample for ListDbServers", + "file": "oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListDbServers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_db_system_shapes", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListDbSystemShapes", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListDbSystemShapes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListDbSystemShapesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { 
+ "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListDbSystemShapesPager", + "shortName": "list_db_system_shapes" + }, + "description": "Sample for ListDbSystemShapes", + "file": "oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListDbSystemShapes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_entitlements", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListEntitlements", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListEntitlements" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListEntitlementsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListEntitlementsPager", + "shortName": "list_entitlements" + }, + 
"description": "Sample for ListEntitlements", + "file": "oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListEntitlements_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_gi_versions", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListGiVersions", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListGiVersions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListGiVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListGiVersionsPager", + "shortName": "list_gi_versions" + }, + "description": "Sample for ListGiVersions", + "file": "oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"oracledatabase_v1_generated_OracleDatabase_ListGiVersions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.restore_autonomous_database", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.RestoreAutonomousDatabase", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "RestoreAutonomousDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.RestoreAutonomousDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "restore_time", + "type": "google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restore_autonomous_database" + }, + "description": "Sample for RestoreAutonomousDatabase", + "file": "oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_RestoreAutonomousDatabase_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": 
"FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py" + } + ] +} diff --git a/packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh b/packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..120b0ddc4364 --- /dev/null +++ b/packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2024 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-oracledatabase/scripts/fixup_oracledatabase_v1_keywords.py b/packages/google-cloud-oracledatabase/scripts/fixup_oracledatabase_v1_keywords.py new file mode 100644 index 000000000000..177c56933878 --- /dev/null +++ b/packages/google-cloud-oracledatabase/scripts/fixup_oracledatabase_v1_keywords.py @@ -0,0 +1,197 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class oracledatabaseCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_autonomous_database': ('parent', 'autonomous_database_id', 'autonomous_database', 'request_id', ), + 'create_cloud_exadata_infrastructure': ('parent', 'cloud_exadata_infrastructure_id', 'cloud_exadata_infrastructure', 'request_id', ), + 'create_cloud_vm_cluster': ('parent', 'cloud_vm_cluster_id', 'cloud_vm_cluster', 'request_id', ), + 'delete_autonomous_database': ('name', 'request_id', ), + 'delete_cloud_exadata_infrastructure': ('name', 'request_id', 'force', ), + 'delete_cloud_vm_cluster': ('name', 'request_id', 'force', ), + 'generate_autonomous_database_wallet': ('name', 'password', 'type_', 'is_regional', ), + 'get_autonomous_database': ('name', ), + 'get_cloud_exadata_infrastructure': ('name', ), + 'get_cloud_vm_cluster': ('name', ), + 'list_autonomous_database_backups': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_autonomous_database_character_sets': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_autonomous_databases': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_autonomous_db_versions': ('parent', 'page_size', 'page_token', ), + 'list_cloud_exadata_infrastructures': ('parent', 'page_size', 'page_token', ), + 'list_cloud_vm_clusters': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_db_nodes': ('parent', 'page_size', 'page_token', ), + 'list_db_servers': ('parent', 
'page_size', 'page_token', ), + 'list_db_system_shapes': ('parent', 'page_size', 'page_token', ), + 'list_entitlements': ('parent', 'page_size', 'page_token', ), + 'list_gi_versions': ('parent', 'page_size', 'page_token', ), + 'restore_autonomous_database': ('name', 'restore_time', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=oracledatabaseCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. 
+ + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the oracledatabase client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-oracledatabase/setup.py b/packages/google-cloud-oracledatabase/setup.py new file mode 100644 index 000000000000..f4dfafa62eff --- /dev/null +++ b/packages/google-cloud-oracledatabase/setup.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-oracledatabase" + + +description = "Google Cloud Oracledatabase API client library" + +version = None + +with open( + os.path.join(package_root, "google/cloud/oracledatabase/gapic_version.py") +) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert len(version_candidates) == 1 + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-oracledatabase" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language 
:: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-oracledatabase/testing/.gitignore b/packages/google-cloud-oracledatabase/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-oracledatabase/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-oracledatabase/testing/constraints-3.10.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-oracledatabase/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-oracledatabase/testing/constraints-3.11.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-oracledatabase/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-oracledatabase/testing/constraints-3.12.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-oracledatabase/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-oracledatabase/testing/constraints-3.7.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.7.txt new file mode 100644 index 000000000000..fc812592b0ee --- /dev/null +++ b/packages/google-cloud-oracledatabase/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/packages/google-cloud-oracledatabase/testing/constraints-3.8.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-oracledatabase/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-oracledatabase/testing/constraints-3.9.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-oracledatabase/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-oracledatabase/tests/__init__.py b/packages/google-cloud-oracledatabase/tests/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-oracledatabase/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-oracledatabase/tests/unit/__init__.py b/packages/google-cloud-oracledatabase/tests/unit/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-oracledatabase/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-oracledatabase/tests/unit/gapic/__init__.py b/packages/google-cloud-oracledatabase/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-oracledatabase/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/__init__.py b/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py b/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py new file mode 100644 index 000000000000..05cfe6d9f132 --- /dev/null +++ b/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py @@ -0,0 +1,10589 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import datetime_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import month_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.oracledatabase_v1.services.oracle_database import ( + OracleDatabaseClient, + pagers, + transports, +) +from google.cloud.oracledatabase_v1.types import ( + autonomous_database_character_set, + autonomous_db_backup, + autonomous_db_version, + common, + db_node, + db_server, + db_system_shape, + entitlement, + exadata_infra, + gi_version, + 
oracledatabase, + vm_cluster, +) +from google.cloud.oracledatabase_v1.types import ( + autonomous_database as gco_autonomous_database, +) +from google.cloud.oracledatabase_v1.types import autonomous_database + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert OracleDatabaseClient._get_default_mtls_endpoint(None) is None + assert ( + OracleDatabaseClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + OracleDatabaseClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + OracleDatabaseClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + OracleDatabaseClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + OracleDatabaseClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def 
test__read_environment_variables(): + assert OracleDatabaseClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert OracleDatabaseClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + OracleDatabaseClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + OracleDatabaseClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert 
@mock.patch.object(
    OracleDatabaseClient,
    "_DEFAULT_ENDPOINT_TEMPLATE",
    modify_default_endpoint_template(OracleDatabaseClient),
)
def test__get_api_endpoint():
    """_get_api_endpoint resolves the host from override, cert, universe and mTLS mode."""
    api_override = "foo.com"
    mock_client_cert_source = mock.Mock()
    default_universe = OracleDatabaseClient._DEFAULT_UNIVERSE
    default_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format(
        UNIVERSE_DOMAIN=default_universe
    )
    mock_universe = "bar.com"
    mock_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format(
        UNIVERSE_DOMAIN=mock_universe
    )
    resolve = OracleDatabaseClient._get_api_endpoint
    mtls_endpoint = OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT

    # An explicit api_endpoint override always wins.
    assert (
        resolve(api_override, mock_client_cert_source, default_universe, "always")
        == api_override
    )
    # A client cert with "auto" selects the mTLS endpoint.
    assert (
        resolve(None, mock_client_cert_source, default_universe, "auto")
        == mtls_endpoint
    )
    # No cert with "auto" falls back to the plain default endpoint.
    assert resolve(None, None, default_universe, "auto") == default_endpoint
    # "always" forces mTLS regardless of the cert source.
    assert resolve(None, None, default_universe, "always") == mtls_endpoint
    assert (
        resolve(None, mock_client_cert_source, default_universe, "always")
        == mtls_endpoint
    )
    # "never" keeps the plain endpoint for whichever universe is configured.
    assert resolve(None, None, mock_universe, "never") == mock_endpoint
    assert resolve(None, None, default_universe, "never") == default_endpoint

    # mTLS outside the default universe is rejected.
    with pytest.raises(MutualTLSChannelError) as excinfo:
        resolve(None, mock_client_cert_source, mock_universe, "auto")
    assert (
        str(excinfo.value)
        == "mTLS is not supported in any universe other than googleapis.com."
    )


def test__get_universe_domain():
    """Client option beats the env var; both default to the GDU; '' is rejected."""
    client_universe_domain = "foo.com"
    universe_domain_env = "bar.com"
    get = OracleDatabaseClient._get_universe_domain

    assert get(client_universe_domain, universe_domain_env) == client_universe_domain
    assert get(None, universe_domain_env) == universe_domain_env
    assert get(None, None) == OracleDatabaseClient._DEFAULT_UNIVERSE

    with pytest.raises(ValueError) as excinfo:
        get("", None)
    assert str(excinfo.value) == "Universe Domain cannot be an empty string."
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (OracleDatabaseClient, "rest"), + ], +) +def test_oracle_database_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "oracledatabase.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://oracledatabase.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.OracleDatabaseRestTransport, "rest"), + ], +) +def test_oracle_database_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = 
service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (OracleDatabaseClient, "rest"), + ], +) +def test_oracle_database_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "oracledatabase.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://oracledatabase.googleapis.com" + ) + + +def test_oracle_database_client_get_transport_class(): + transport = OracleDatabaseClient.get_transport_class() + available_transports = [ + transports.OracleDatabaseRestTransport, + ] + assert transport in available_transports + + transport = OracleDatabaseClient.get_transport_class("rest") + assert transport == transports.OracleDatabaseRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), + ], +) +@mock.patch.object( + OracleDatabaseClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + 
modify_default_endpoint_template(OracleDatabaseClient), +) +def test_oracle_database_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(OracleDatabaseClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(OracleDatabaseClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", "true"), + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + OracleDatabaseClient, + 
"_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OracleDatabaseClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_oracle_database_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [OracleDatabaseClient]) +@mock.patch.object( + OracleDatabaseClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OracleDatabaseClient), +) +def test_oracle_database_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [OracleDatabaseClient]) +@mock.patch.object( + OracleDatabaseClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OracleDatabaseClient), +) +def test_oracle_database_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = OracleDatabaseClient._DEFAULT_UNIVERSE + default_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), + ], +) +def test_oracle_database_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", None), + ], +) +def test_oracle_database_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListCloudExadataInfrastructuresRequest, + dict, + ], +) +def test_list_cloud_exadata_infrastructures_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_cloud_exadata_infrastructures(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListCloudExadataInfrastructuresPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_cloud_exadata_infrastructures_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_cloud_exadata_infrastructures + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_cloud_exadata_infrastructures + ] = mock_rpc + + request = {} + client.list_cloud_exadata_infrastructures(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_cloud_exadata_infrastructures(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_cloud_exadata_infrastructures_rest_required_fields( + request_type=oracledatabase.ListCloudExadataInfrastructuresRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_cloud_exadata_infrastructures._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_cloud_exadata_infrastructures._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_cloud_exadata_infrastructures(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_cloud_exadata_infrastructures_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.list_cloud_exadata_infrastructures._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_cloud_exadata_infrastructures_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with 
mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_list_cloud_exadata_infrastructures", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "pre_list_cloud_exadata_infrastructures", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListCloudExadataInfrastructuresRequest.pb( + oracledatabase.ListCloudExadataInfrastructuresRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + oracledatabase.ListCloudExadataInfrastructuresResponse.to_json( + oracledatabase.ListCloudExadataInfrastructuresResponse() + ) + ) + + request = oracledatabase.ListCloudExadataInfrastructuresRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListCloudExadataInfrastructuresResponse() + + client.list_cloud_exadata_infrastructures( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_cloud_exadata_infrastructures_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.ListCloudExadataInfrastructuresRequest, +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_cloud_exadata_infrastructures(request) + + +def test_list_cloud_exadata_infrastructures_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_cloud_exadata_infrastructures(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures" + % client.transport._host, + args[1], + ) + + +def test_list_cloud_exadata_infrastructures_rest_flattened_error( + transport: str = "rest", +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_cloud_exadata_infrastructures( + oracledatabase.ListCloudExadataInfrastructuresRequest(), + parent="parent_value", + ) + + +def test_list_cloud_exadata_infrastructures_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListCloudExadataInfrastructuresResponse( + cloud_exadata_infrastructures=[ + exadata_infra.CloudExadataInfrastructure(), + exadata_infra.CloudExadataInfrastructure(), + exadata_infra.CloudExadataInfrastructure(), + ], + next_page_token="abc", + ), + oracledatabase.ListCloudExadataInfrastructuresResponse( + cloud_exadata_infrastructures=[], + next_page_token="def", + ), + oracledatabase.ListCloudExadataInfrastructuresResponse( + cloud_exadata_infrastructures=[ + exadata_infra.CloudExadataInfrastructure(), + ], + next_page_token="ghi", + ), + oracledatabase.ListCloudExadataInfrastructuresResponse( + cloud_exadata_infrastructures=[ + exadata_infra.CloudExadataInfrastructure(), + exadata_infra.CloudExadataInfrastructure(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListCloudExadataInfrastructuresResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_cloud_exadata_infrastructures(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, exadata_infra.CloudExadataInfrastructure) for i in results + ) + + pages = list( + client.list_cloud_exadata_infrastructures(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.GetCloudExadataInfrastructureRequest, + dict, + ], +) +def 
test_get_cloud_exadata_infrastructure_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = exadata_infra.CloudExadataInfrastructure( + name="name_value", + display_name="display_name_value", + gcp_oracle_zone="gcp_oracle_zone_value", + entitlement_id="entitlement_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = exadata_infra.CloudExadataInfrastructure.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_cloud_exadata_infrastructure(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, exadata_infra.CloudExadataInfrastructure) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.gcp_oracle_zone == "gcp_oracle_zone_value" + assert response.entitlement_id == "entitlement_id_value" + + +def test_get_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_cloud_exadata_infrastructure + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_cloud_exadata_infrastructure + ] = mock_rpc + + request = {} + client.get_cloud_exadata_infrastructure(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_cloud_exadata_infrastructure(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_cloud_exadata_infrastructure_rest_required_fields( + request_type=oracledatabase.GetCloudExadataInfrastructureRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = exadata_infra.CloudExadataInfrastructure() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = exadata_infra.CloudExadataInfrastructure.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_cloud_exadata_infrastructure(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_cloud_exadata_infrastructure_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.get_cloud_exadata_infrastructure._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_cloud_exadata_infrastructure_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_get_cloud_exadata_infrastructure", + ) as post, mock.patch.object( + 
transports.OracleDatabaseRestInterceptor, "pre_get_cloud_exadata_infrastructure" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.GetCloudExadataInfrastructureRequest.pb( + oracledatabase.GetCloudExadataInfrastructureRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = exadata_infra.CloudExadataInfrastructure.to_json( + exadata_infra.CloudExadataInfrastructure() + ) + + request = oracledatabase.GetCloudExadataInfrastructureRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = exadata_infra.CloudExadataInfrastructure() + + client.get_cloud_exadata_infrastructure( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_cloud_exadata_infrastructure_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.GetCloudExadataInfrastructureRequest, +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_cloud_exadata_infrastructure(request) + + +def test_get_cloud_exadata_infrastructure_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = exadata_infra.CloudExadataInfrastructure() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = exadata_infra.CloudExadataInfrastructure.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_cloud_exadata_infrastructure(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}" + % client.transport._host, + args[1], + ) + + +def test_get_cloud_exadata_infrastructure_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_cloud_exadata_infrastructure( + oracledatabase.GetCloudExadataInfrastructureRequest(), + name="name_value", + ) + + +def test_get_cloud_exadata_infrastructure_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.CreateCloudExadataInfrastructureRequest, + dict, + ], +) +def test_create_cloud_exadata_infrastructure_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cloud_exadata_infrastructure"] = { + "name": "name_value", + "display_name": "display_name_value", + "gcp_oracle_zone": "gcp_oracle_zone_value", + "entitlement_id": "entitlement_id_value", + "properties": { + "ocid": "ocid_value", + "compute_count": 1413, + "storage_count": 1405, + "total_storage_size_gb": 2234, + "available_storage_size_gb": 2615, + "maintenance_window": { + "preference": 1, + "months": [1], + "weeks_of_month": [1497, 1498], + "days_of_week": [1], + "hours_of_day": [1283, 1284], + "lead_time_week": 1455, + "patching_mode": 1, + "custom_action_timeout_mins": 2804, + "is_custom_action_timeout_enabled": True, + }, + "state": 1, + "shape": "shape_value", + "oci_url": "oci_url_value", + 
"cpu_count": 976, + "max_cpu_count": 1397, + "memory_size_gb": 1499, + "max_memory_gb": 1382, + "db_node_storage_size_gb": 2401, + "max_db_node_storage_size_gb": 2822, + "data_storage_size_tb": 0.2109, + "max_data_storage_tb": 0.19920000000000002, + "activated_storage_count": 2449, + "additional_storage_count": 2549, + "db_server_version": "db_server_version_value", + "storage_server_version": "storage_server_version_value", + "next_maintenance_run_id": "next_maintenance_run_id_value", + "next_maintenance_run_time": {"seconds": 751, "nanos": 543}, + "next_security_maintenance_run_time": {}, + "customer_contacts": [{"email": "email_value"}], + "monthly_storage_server_version": "monthly_storage_server_version_value", + "monthly_db_server_version": "monthly_db_server_version_value", + }, + "labels": {}, + "create_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = oracledatabase.CreateCloudExadataInfrastructureRequest.meta.fields[ + "cloud_exadata_infrastructure" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "cloud_exadata_infrastructure" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["cloud_exadata_infrastructure"][field]) + ): + del 
request_init["cloud_exadata_infrastructure"][field][i][subfield] + else: + del request_init["cloud_exadata_infrastructure"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_cloud_exadata_infrastructure(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_cloud_exadata_infrastructure + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_cloud_exadata_infrastructure + ] = mock_rpc + + request = {} + client.create_cloud_exadata_infrastructure(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_cloud_exadata_infrastructure(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_cloud_exadata_infrastructure_rest_required_fields( + request_type=oracledatabase.CreateCloudExadataInfrastructureRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["cloud_exadata_infrastructure_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "cloudExadataInfrastructureId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "cloudExadataInfrastructureId" in jsonified_request + assert ( + jsonified_request["cloudExadataInfrastructureId"] + == request_init["cloud_exadata_infrastructure_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request[ + "cloudExadataInfrastructureId" + ] = "cloud_exadata_infrastructure_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "cloud_exadata_infrastructure_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "cloudExadataInfrastructureId" in jsonified_request + assert ( + jsonified_request["cloudExadataInfrastructureId"] + == "cloud_exadata_infrastructure_id_value" + ) + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_cloud_exadata_infrastructure(request) + + expected_params = [ + ( + "cloudExadataInfrastructureId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_cloud_exadata_infrastructure_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.create_cloud_exadata_infrastructure._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "cloudExadataInfrastructureId", + "requestId", + ) + ) + & set( + ( + "parent", + "cloudExadataInfrastructureId", + "cloudExadataInfrastructure", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_cloud_exadata_infrastructure_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_create_cloud_exadata_infrastructure", + ) as post, mock.patch.object( + 
transports.OracleDatabaseRestInterceptor, + "pre_create_cloud_exadata_infrastructure", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.CreateCloudExadataInfrastructureRequest.pb( + oracledatabase.CreateCloudExadataInfrastructureRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = oracledatabase.CreateCloudExadataInfrastructureRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_cloud_exadata_infrastructure( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_cloud_exadata_infrastructure_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.CreateCloudExadataInfrastructureRequest, +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_cloud_exadata_infrastructure(request) + + +def test_create_cloud_exadata_infrastructure_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + cloud_exadata_infrastructure=exadata_infra.CloudExadataInfrastructure( + name="name_value" + ), + cloud_exadata_infrastructure_id="cloud_exadata_infrastructure_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_cloud_exadata_infrastructure(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures" + % client.transport._host, + args[1], + ) + + +def test_create_cloud_exadata_infrastructure_rest_flattened_error( + transport: str = "rest", +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_cloud_exadata_infrastructure( + oracledatabase.CreateCloudExadataInfrastructureRequest(), + parent="parent_value", + cloud_exadata_infrastructure=exadata_infra.CloudExadataInfrastructure( + name="name_value" + ), + cloud_exadata_infrastructure_id="cloud_exadata_infrastructure_id_value", + ) + + +def test_create_cloud_exadata_infrastructure_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.DeleteCloudExadataInfrastructureRequest, + dict, + ], +) +def test_delete_cloud_exadata_infrastructure_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_cloud_exadata_infrastructure(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_cloud_exadata_infrastructure + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_cloud_exadata_infrastructure + ] = mock_rpc + + request = {} + client.delete_cloud_exadata_infrastructure(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_cloud_exadata_infrastructure(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_cloud_exadata_infrastructure_rest_required_fields( + request_type=oracledatabase.DeleteCloudExadataInfrastructureRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "force", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_cloud_exadata_infrastructure(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_cloud_exadata_infrastructure_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.delete_cloud_exadata_infrastructure._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_cloud_exadata_infrastructure_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with 
mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_delete_cloud_exadata_infrastructure", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "pre_delete_cloud_exadata_infrastructure", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.DeleteCloudExadataInfrastructureRequest.pb( + oracledatabase.DeleteCloudExadataInfrastructureRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = oracledatabase.DeleteCloudExadataInfrastructureRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_cloud_exadata_infrastructure( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_cloud_exadata_infrastructure_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.DeleteCloudExadataInfrastructureRequest, +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_cloud_exadata_infrastructure(request) + + +def test_delete_cloud_exadata_infrastructure_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_cloud_exadata_infrastructure(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_cloud_exadata_infrastructure_rest_flattened_error( + transport: str = "rest", +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_cloud_exadata_infrastructure( + oracledatabase.DeleteCloudExadataInfrastructureRequest(), + name="name_value", + ) + + +def test_delete_cloud_exadata_infrastructure_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListCloudVmClustersRequest, + dict, + ], +) +def test_list_cloud_vm_clusters_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = oracledatabase.ListCloudVmClustersResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListCloudVmClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_cloud_vm_clusters(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCloudVmClustersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_cloud_vm_clusters_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_cloud_vm_clusters + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_cloud_vm_clusters + ] = mock_rpc + + request = {} + client.list_cloud_vm_clusters(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_cloud_vm_clusters(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_cloud_vm_clusters_rest_required_fields( + request_type=oracledatabase.ListCloudVmClustersRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_cloud_vm_clusters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_cloud_vm_clusters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListCloudVmClustersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListCloudVmClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_cloud_vm_clusters(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_cloud_vm_clusters_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_cloud_vm_clusters._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_cloud_vm_clusters_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as 
req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_cloud_vm_clusters" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_cloud_vm_clusters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListCloudVmClustersRequest.pb( + oracledatabase.ListCloudVmClustersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListCloudVmClustersResponse.to_json( + oracledatabase.ListCloudVmClustersResponse() + ) + + request = oracledatabase.ListCloudVmClustersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListCloudVmClustersResponse() + + client.list_cloud_vm_clusters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_cloud_vm_clusters_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListCloudVmClustersRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_cloud_vm_clusters(request) + + +def test_list_cloud_vm_clusters_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListCloudVmClustersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListCloudVmClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_cloud_vm_clusters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/cloudVmClusters" + % client.transport._host, + args[1], + ) + + +def test_list_cloud_vm_clusters_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_cloud_vm_clusters( + oracledatabase.ListCloudVmClustersRequest(), + parent="parent_value", + ) + + +def test_list_cloud_vm_clusters_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListCloudVmClustersResponse( + cloud_vm_clusters=[ + vm_cluster.CloudVmCluster(), + vm_cluster.CloudVmCluster(), + vm_cluster.CloudVmCluster(), + ], + next_page_token="abc", + ), + oracledatabase.ListCloudVmClustersResponse( + cloud_vm_clusters=[], + next_page_token="def", + ), + oracledatabase.ListCloudVmClustersResponse( + cloud_vm_clusters=[ + vm_cluster.CloudVmCluster(), + ], + next_page_token="ghi", + ), + oracledatabase.ListCloudVmClustersResponse( + cloud_vm_clusters=[ + vm_cluster.CloudVmCluster(), + vm_cluster.CloudVmCluster(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListCloudVmClustersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_cloud_vm_clusters(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vm_cluster.CloudVmCluster) for i in results) + + pages = list(client.list_cloud_vm_clusters(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.GetCloudVmClusterRequest, + dict, + ], +) +def test_get_cloud_vm_cluster_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": 
"projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vm_cluster.CloudVmCluster( + name="name_value", + exadata_infrastructure="exadata_infrastructure_value", + display_name="display_name_value", + gcp_oracle_zone="gcp_oracle_zone_value", + cidr="cidr_value", + backup_subnet_cidr="backup_subnet_cidr_value", + network="network_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vm_cluster.CloudVmCluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_cloud_vm_cluster(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vm_cluster.CloudVmCluster) + assert response.name == "name_value" + assert response.exadata_infrastructure == "exadata_infrastructure_value" + assert response.display_name == "display_name_value" + assert response.gcp_oracle_zone == "gcp_oracle_zone_value" + assert response.cidr == "cidr_value" + assert response.backup_subnet_cidr == "backup_subnet_cidr_value" + assert response.network == "network_value" + + +def test_get_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_cloud_vm_cluster in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_cloud_vm_cluster + ] = mock_rpc + + request = {} + client.get_cloud_vm_cluster(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_cloud_vm_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_cloud_vm_cluster_rest_required_fields( + request_type=oracledatabase.GetCloudVmClusterRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vm_cluster.CloudVmCluster() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vm_cluster.CloudVmCluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_cloud_vm_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_cloud_vm_cluster_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_cloud_vm_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_cloud_vm_cluster_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_get_cloud_vm_cluster" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_get_cloud_vm_cluster" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = oracledatabase.GetCloudVmClusterRequest.pb( + oracledatabase.GetCloudVmClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vm_cluster.CloudVmCluster.to_json( + vm_cluster.CloudVmCluster() + ) + + request = oracledatabase.GetCloudVmClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vm_cluster.CloudVmCluster() + + client.get_cloud_vm_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_cloud_vm_cluster_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.GetCloudVmClusterRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_cloud_vm_cluster(request) + + +def test_get_cloud_vm_cluster_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vm_cluster.CloudVmCluster() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vm_cluster.CloudVmCluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_cloud_vm_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/cloudVmClusters/*}" + % client.transport._host, + args[1], + ) + + +def test_get_cloud_vm_cluster_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_cloud_vm_cluster( + oracledatabase.GetCloudVmClusterRequest(), + name="name_value", + ) + + +def test_get_cloud_vm_cluster_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.CreateCloudVmClusterRequest, + dict, + ], +) +def test_create_cloud_vm_cluster_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cloud_vm_cluster"] = { + "name": "name_value", + "exadata_infrastructure": "exadata_infrastructure_value", + "display_name": "display_name_value", + "gcp_oracle_zone": "gcp_oracle_zone_value", + "properties": { + "ocid": "ocid_value", + "license_type": 1, + "gi_version": "gi_version_value", + "time_zone": {"id": "id_value", "version": "version_value"}, + "ssh_public_keys": ["ssh_public_keys_value1", "ssh_public_keys_value2"], + "node_count": 1070, + "shape": "shape_value", + "ocpu_count": 0.1087, + "memory_size_gb": 1499, + "db_node_storage_size_gb": 2401, + "storage_size_gb": 1591, + "data_storage_size_tb": 0.2109, + "disk_redundancy": 1, + "sparse_diskgroup_enabled": True, + "local_backup_enabled": True, + "hostname_prefix": "hostname_prefix_value", + "diagnostics_data_collection_options": { + "diagnostics_events_enabled": True, + "health_monitoring_enabled": True, + "incident_logs_enabled": True, + }, + "state": 1, + "scan_listener_port_tcp": 2356, + "scan_listener_port_tcp_ssl": 2789, + "domain": "domain_value", + "scan_dns": "scan_dns_value", + "hostname": "hostname_value", + "cpu_core_count": 1496, + "system_version": "system_version_value", + "scan_ip_ids": ["scan_ip_ids_value1", "scan_ip_ids_value2"], + "scan_dns_record_id": "scan_dns_record_id_value", + "oci_url": 
"oci_url_value", + "db_server_ocids": ["db_server_ocids_value1", "db_server_ocids_value2"], + "compartment_id": "compartment_id_value", + "dns_listener_ip": "dns_listener_ip_value", + "cluster_name": "cluster_name_value", + }, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "cidr": "cidr_value", + "backup_subnet_cidr": "backup_subnet_cidr_value", + "network": "network_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = oracledatabase.CreateCloudVmClusterRequest.meta.fields[ + "cloud_vm_cluster" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["cloud_vm_cluster"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cloud_vm_cluster"][field])): + del request_init["cloud_vm_cluster"][field][i][subfield] + 
else: + del request_init["cloud_vm_cluster"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_cloud_vm_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_cloud_vm_cluster + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_cloud_vm_cluster + ] = mock_rpc + + request = {} + client.create_cloud_vm_cluster(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_cloud_vm_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_cloud_vm_cluster_rest_required_fields( + request_type=oracledatabase.CreateCloudVmClusterRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["cloud_vm_cluster_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "cloudVmClusterId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "cloudVmClusterId" in jsonified_request + assert jsonified_request["cloudVmClusterId"] == request_init["cloud_vm_cluster_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["cloudVmClusterId"] = "cloud_vm_cluster_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "cloud_vm_cluster_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "cloudVmClusterId" in jsonified_request + assert jsonified_request["cloudVmClusterId"] == "cloud_vm_cluster_id_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_cloud_vm_cluster(request) + + expected_params = [ + ( + "cloudVmClusterId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_cloud_vm_cluster_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_cloud_vm_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "cloudVmClusterId", + "requestId", + ) + ) + & set( + ( + "parent", + "cloudVmClusterId", + "cloudVmCluster", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_cloud_vm_cluster_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_create_cloud_vm_cluster" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_create_cloud_vm_cluster" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = oracledatabase.CreateCloudVmClusterRequest.pb( + oracledatabase.CreateCloudVmClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = oracledatabase.CreateCloudVmClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_cloud_vm_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_cloud_vm_cluster_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.CreateCloudVmClusterRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_cloud_vm_cluster(request) + + +def test_create_cloud_vm_cluster_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + cloud_vm_cluster=vm_cluster.CloudVmCluster(name="name_value"), + cloud_vm_cluster_id="cloud_vm_cluster_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_cloud_vm_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/cloudVmClusters" + % client.transport._host, + args[1], + ) + + +def test_create_cloud_vm_cluster_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_cloud_vm_cluster( + oracledatabase.CreateCloudVmClusterRequest(), + parent="parent_value", + cloud_vm_cluster=vm_cluster.CloudVmCluster(name="name_value"), + cloud_vm_cluster_id="cloud_vm_cluster_id_value", + ) + + +def test_create_cloud_vm_cluster_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.DeleteCloudVmClusterRequest, + dict, + ], +) +def test_delete_cloud_vm_cluster_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_cloud_vm_cluster(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_cloud_vm_cluster + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_cloud_vm_cluster + ] = mock_rpc + + request = {} + client.delete_cloud_vm_cluster(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_cloud_vm_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_cloud_vm_cluster_rest_required_fields( + request_type=oracledatabase.DeleteCloudVmClusterRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "force", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_cloud_vm_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_cloud_vm_cluster_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_cloud_vm_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_cloud_vm_cluster_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, 
"_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_delete_cloud_vm_cluster" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_delete_cloud_vm_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.DeleteCloudVmClusterRequest.pb( + oracledatabase.DeleteCloudVmClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = oracledatabase.DeleteCloudVmClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_cloud_vm_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_cloud_vm_cluster_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.DeleteCloudVmClusterRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_cloud_vm_cluster(request) + + +def test_delete_cloud_vm_cluster_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_cloud_vm_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/cloudVmClusters/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_cloud_vm_cluster_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_cloud_vm_cluster( + oracledatabase.DeleteCloudVmClusterRequest(), + name="name_value", + ) + + +def test_delete_cloud_vm_cluster_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListEntitlementsRequest, + dict, + ], +) +def test_list_entitlements_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListEntitlementsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListEntitlementsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_entitlements(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListEntitlementsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_entitlements_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_entitlements in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_entitlements + ] = mock_rpc + + request = {} + client.list_entitlements(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_entitlements(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_entitlements_rest_required_fields( + request_type=oracledatabase.ListEntitlementsRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_entitlements._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_entitlements._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListEntitlementsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListEntitlementsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_entitlements(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_entitlements_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_entitlements._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_entitlements_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + 
path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_entitlements" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_entitlements" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListEntitlementsRequest.pb( + oracledatabase.ListEntitlementsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListEntitlementsResponse.to_json( + oracledatabase.ListEntitlementsResponse() + ) + + request = oracledatabase.ListEntitlementsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListEntitlementsResponse() + + client.list_entitlements( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_entitlements_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListEntitlementsRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_entitlements(request) + + +def test_list_entitlements_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListEntitlementsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListEntitlementsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_entitlements(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/entitlements" + % client.transport._host, + args[1], + ) + + +def test_list_entitlements_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_entitlements( + oracledatabase.ListEntitlementsRequest(), + parent="parent_value", + ) + + +def test_list_entitlements_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListEntitlementsResponse( + entitlements=[ + entitlement.Entitlement(), + entitlement.Entitlement(), + entitlement.Entitlement(), + ], + next_page_token="abc", + ), + oracledatabase.ListEntitlementsResponse( + entitlements=[], + next_page_token="def", + ), + oracledatabase.ListEntitlementsResponse( + entitlements=[ + entitlement.Entitlement(), + ], + next_page_token="ghi", + ), + oracledatabase.ListEntitlementsResponse( + entitlements=[ + entitlement.Entitlement(), + entitlement.Entitlement(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListEntitlementsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_entitlements(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, entitlement.Entitlement) for i in results) + + pages = list(client.list_entitlements(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListDbServersRequest, + dict, + ], +) +def test_list_db_servers_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbServersResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListDbServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_db_servers(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDbServersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_db_servers_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_db_servers in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_db_servers] = mock_rpc + + request = {} + client.list_db_servers(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_db_servers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_db_servers_rest_required_fields( + request_type=oracledatabase.ListDbServersRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_servers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_servers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbServersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListDbServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_db_servers(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_db_servers_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_db_servers._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_db_servers_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_db_servers" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_db_servers" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListDbServersRequest.pb( + oracledatabase.ListDbServersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListDbServersResponse.to_json( + oracledatabase.ListDbServersResponse() + ) + + request = oracledatabase.ListDbServersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListDbServersResponse() + + client.list_db_servers( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_db_servers_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListDbServersRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_db_servers(request) + + +def test_list_db_servers_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbServersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListDbServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_db_servers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/cloudExadataInfrastructures/*}/dbServers" + % client.transport._host, + args[1], + ) + + +def test_list_db_servers_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_db_servers( + oracledatabase.ListDbServersRequest(), + parent="parent_value", + ) + + +def test_list_db_servers_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListDbServersResponse( + db_servers=[ + db_server.DbServer(), + db_server.DbServer(), + db_server.DbServer(), + ], + next_page_token="abc", + ), + oracledatabase.ListDbServersResponse( + db_servers=[], + next_page_token="def", + ), + oracledatabase.ListDbServersResponse( + db_servers=[ + db_server.DbServer(), + ], + next_page_token="ghi", + ), + oracledatabase.ListDbServersResponse( + db_servers=[ + db_server.DbServer(), + db_server.DbServer(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListDbServersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + + pager = client.list_db_servers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, db_server.DbServer) for i in results) + + pages = list(client.list_db_servers(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListDbNodesRequest, + dict, + ], +) +def test_list_db_nodes_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call 
within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbNodesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListDbNodesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_db_nodes(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDbNodesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_db_nodes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_db_nodes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_db_nodes] = mock_rpc + + request = {} + client.list_db_nodes(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_db_nodes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_db_nodes_rest_required_fields( + request_type=oracledatabase.ListDbNodesRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_nodes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_nodes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbNodesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListDbNodesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_db_nodes(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_db_nodes_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_db_nodes._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_db_nodes_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) 
as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_db_nodes" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_db_nodes" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListDbNodesRequest.pb( + oracledatabase.ListDbNodesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListDbNodesResponse.to_json( + oracledatabase.ListDbNodesResponse() + ) + + request = oracledatabase.ListDbNodesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListDbNodesResponse() + + client.list_db_nodes( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_db_nodes_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListDbNodesRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_db_nodes(request) + + +def test_list_db_nodes_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbNodesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListDbNodesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_db_nodes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/cloudVmClusters/*}/dbNodes" + % client.transport._host, + args[1], + ) + + +def test_list_db_nodes_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_db_nodes( + oracledatabase.ListDbNodesRequest(), + parent="parent_value", + ) + + +def test_list_db_nodes_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListDbNodesResponse( + db_nodes=[ + db_node.DbNode(), + db_node.DbNode(), + db_node.DbNode(), + ], + next_page_token="abc", + ), + oracledatabase.ListDbNodesResponse( + db_nodes=[], + next_page_token="def", + ), + oracledatabase.ListDbNodesResponse( + db_nodes=[ + db_node.DbNode(), + ], + next_page_token="ghi", + ), + oracledatabase.ListDbNodesResponse( + db_nodes=[ + db_node.DbNode(), + db_node.DbNode(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListDbNodesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = 
return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + + pager = client.list_db_nodes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, db_node.DbNode) for i in results) + + pages = list(client.list_db_nodes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListGiVersionsRequest, + dict, + ], +) +def test_list_gi_versions_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListGiVersionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListGiVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_gi_versions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListGiVersionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_gi_versions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_gi_versions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_gi_versions + ] = mock_rpc + + request = {} + client.list_gi_versions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_gi_versions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_gi_versions_rest_required_fields( + request_type=oracledatabase.ListGiVersionsRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_gi_versions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_gi_versions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListGiVersionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListGiVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_gi_versions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_gi_versions_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_gi_versions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_gi_versions_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_gi_versions" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_gi_versions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListGiVersionsRequest.pb( + oracledatabase.ListGiVersionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListGiVersionsResponse.to_json( + oracledatabase.ListGiVersionsResponse() + ) + + request = oracledatabase.ListGiVersionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListGiVersionsResponse() + + client.list_gi_versions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_gi_versions_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListGiVersionsRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_gi_versions(request) + + +def test_list_gi_versions_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListGiVersionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListGiVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_gi_versions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/giVersions" % client.transport._host, + args[1], + ) + + +def test_list_gi_versions_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_gi_versions( + oracledatabase.ListGiVersionsRequest(), + parent="parent_value", + ) + + +def test_list_gi_versions_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListGiVersionsResponse( + gi_versions=[ + gi_version.GiVersion(), + gi_version.GiVersion(), + gi_version.GiVersion(), + ], + next_page_token="abc", + ), + oracledatabase.ListGiVersionsResponse( + gi_versions=[], + next_page_token="def", + ), + oracledatabase.ListGiVersionsResponse( + gi_versions=[ + gi_version.GiVersion(), + ], + next_page_token="ghi", + ), + oracledatabase.ListGiVersionsResponse( + gi_versions=[ + gi_version.GiVersion(), + gi_version.GiVersion(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListGiVersionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + 
return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_gi_versions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, gi_version.GiVersion) for i in results) + + pages = list(client.list_gi_versions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListDbSystemShapesRequest, + dict, + ], +) +def test_list_db_system_shapes_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbSystemShapesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListDbSystemShapesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_db_system_shapes(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDbSystemShapesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_db_system_shapes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_db_system_shapes + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_db_system_shapes + ] = mock_rpc + + request = {} + client.list_db_system_shapes(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_db_system_shapes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_db_system_shapes_rest_required_fields( + request_type=oracledatabase.ListDbSystemShapesRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_system_shapes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_system_shapes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbSystemShapesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListDbSystemShapesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_db_system_shapes(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_db_system_shapes_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_db_system_shapes._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_db_system_shapes_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_db_system_shapes" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_db_system_shapes" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListDbSystemShapesRequest.pb( + oracledatabase.ListDbSystemShapesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListDbSystemShapesResponse.to_json( + oracledatabase.ListDbSystemShapesResponse() + ) + + request = oracledatabase.ListDbSystemShapesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListDbSystemShapesResponse() + + client.list_db_system_shapes( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_db_system_shapes_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListDbSystemShapesRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_db_system_shapes(request) + + +def test_list_db_system_shapes_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbSystemShapesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListDbSystemShapesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_db_system_shapes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/dbSystemShapes" + % client.transport._host, + args[1], + ) + + +def test_list_db_system_shapes_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_db_system_shapes( + oracledatabase.ListDbSystemShapesRequest(), + parent="parent_value", + ) + + +def test_list_db_system_shapes_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListDbSystemShapesResponse( + db_system_shapes=[ + db_system_shape.DbSystemShape(), + db_system_shape.DbSystemShape(), + db_system_shape.DbSystemShape(), + ], + next_page_token="abc", + ), + oracledatabase.ListDbSystemShapesResponse( + db_system_shapes=[], + next_page_token="def", + ), + oracledatabase.ListDbSystemShapesResponse( + db_system_shapes=[ + db_system_shape.DbSystemShape(), + ], + next_page_token="ghi", + ), + oracledatabase.ListDbSystemShapesResponse( + db_system_shapes=[ + db_system_shape.DbSystemShape(), + db_system_shape.DbSystemShape(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListDbSystemShapesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_db_system_shapes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, db_system_shape.DbSystemShape) for i in results) + + pages = list(client.list_db_system_shapes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListAutonomousDatabasesRequest, + dict, + ], +) +def test_list_autonomous_databases_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": 
"projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabasesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_autonomous_databases(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAutonomousDatabasesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_autonomous_databases_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_autonomous_databases + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_autonomous_databases + ] = mock_rpc + + request = {} + client.list_autonomous_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_autonomous_databases(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_autonomous_databases_rest_required_fields( + request_type=oracledatabase.ListAutonomousDatabasesRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_databases._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_databases._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabasesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabasesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_autonomous_databases(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_autonomous_databases_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_autonomous_databases._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_autonomous_databases_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_autonomous_databases" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_databases" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
oracledatabase.ListAutonomousDatabasesRequest.pb( + oracledatabase.ListAutonomousDatabasesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + oracledatabase.ListAutonomousDatabasesResponse.to_json( + oracledatabase.ListAutonomousDatabasesResponse() + ) + ) + + request = oracledatabase.ListAutonomousDatabasesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListAutonomousDatabasesResponse() + + client.list_autonomous_databases( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_autonomous_databases_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListAutonomousDatabasesRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_autonomous_databases(request) + + +def test_list_autonomous_databases_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabasesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_autonomous_databases(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autonomousDatabases" + % client.transport._host, + args[1], + ) + + +def test_list_autonomous_databases_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_autonomous_databases( + oracledatabase.ListAutonomousDatabasesRequest(), + parent="parent_value", + ) + + +def test_list_autonomous_databases_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListAutonomousDatabasesResponse( + autonomous_databases=[ + autonomous_database.AutonomousDatabase(), + autonomous_database.AutonomousDatabase(), + autonomous_database.AutonomousDatabase(), + ], + next_page_token="abc", + ), + oracledatabase.ListAutonomousDatabasesResponse( + autonomous_databases=[], + next_page_token="def", + ), + oracledatabase.ListAutonomousDatabasesResponse( + autonomous_databases=[ + autonomous_database.AutonomousDatabase(), + ], + next_page_token="ghi", + ), + oracledatabase.ListAutonomousDatabasesResponse( + autonomous_databases=[ + autonomous_database.AutonomousDatabase(), + autonomous_database.AutonomousDatabase(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListAutonomousDatabasesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_autonomous_databases(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, autonomous_database.AutonomousDatabase) for i in results + ) + + pages = list(client.list_autonomous_databases(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.GetAutonomousDatabaseRequest, + dict, + ], +) +def 
test_get_autonomous_database_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = autonomous_database.AutonomousDatabase( + name="name_value", + database="database_value", + display_name="display_name_value", + entitlement_id="entitlement_id_value", + admin_password="admin_password_value", + network="network_value", + cidr="cidr_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = autonomous_database.AutonomousDatabase.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_autonomous_database(request) + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, autonomous_database.AutonomousDatabase)
+    assert response.name == "name_value"
+    assert response.database == "database_value"
+    assert response.display_name == "display_name_value"
+    assert response.entitlement_id == "entitlement_id_value"
+    assert response.admin_password == "admin_password_value"
+    assert response.network == "network_value"
+    assert response.cidr == "cidr_value"
+
+
+def test_get_autonomous_database_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = OracleDatabaseClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert (
+            client._transport.get_autonomous_database
+            in client._transport._wrapped_methods
+        )
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expect a string.
+        )
+        client._transport._wrapped_methods[
+            client._transport.get_autonomous_database
+        ] = mock_rpc
+
+        request = {}
+        client.get_autonomous_database(request)
+
+        # Establish that the underlying stub method was called.
+ assert mock_rpc.call_count == 1 + + client.get_autonomous_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_autonomous_database_rest_required_fields( + request_type=oracledatabase.GetAutonomousDatabaseRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_autonomous_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_autonomous_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = autonomous_database.AutonomousDatabase() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = autonomous_database.AutonomousDatabase.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_autonomous_database(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_autonomous_database_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_autonomous_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_autonomous_database_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_get_autonomous_database" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_get_autonomous_database" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.GetAutonomousDatabaseRequest.pb( + oracledatabase.GetAutonomousDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = autonomous_database.AutonomousDatabase.to_json( + autonomous_database.AutonomousDatabase() + ) + + request = oracledatabase.GetAutonomousDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = autonomous_database.AutonomousDatabase() + + client.get_autonomous_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_autonomous_database_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.GetAutonomousDatabaseRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_autonomous_database(request) + + +def test_get_autonomous_database_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = autonomous_database.AutonomousDatabase() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = autonomous_database.AutonomousDatabase.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_autonomous_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}" + % client.transport._host, + args[1], + ) + + +def test_get_autonomous_database_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_autonomous_database( + oracledatabase.GetAutonomousDatabaseRequest(), + name="name_value", + ) + + +def test_get_autonomous_database_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.CreateAutonomousDatabaseRequest, + dict, + ], +) +def test_create_autonomous_database_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["autonomous_database"] = { + "name": "name_value", + "database": "database_value", + "display_name": "display_name_value", + "entitlement_id": "entitlement_id_value", + "admin_password": "admin_password_value", + "properties": { + "ocid": "ocid_value", + "compute_count": 0.1413, + "cpu_core_count": 1496, + "data_storage_size_tb": 2109, + "data_storage_size_gb": 2096, + "db_workload": 1, + "db_edition": 1, + "character_set": "character_set_value", + "n_character_set": "n_character_set_value", + "private_endpoint_ip": "private_endpoint_ip_value", + "private_endpoint_label": "private_endpoint_label_value", + "db_version": "db_version_value", + "is_auto_scaling_enabled": True, + "is_storage_auto_scaling_enabled": True, + "license_type": 1, + "customer_contacts": 
[{"email": "email_value"}], + "secret_id": "secret_id_value", + "vault_id": "vault_id_value", + "maintenance_schedule_type": 1, + "mtls_connection_required": True, + "backup_retention_period_days": 2975, + "actual_used_data_storage_size_tb": 0.3366, + "allocated_storage_size_tb": 0.2636, + "apex_details": { + "apex_version": "apex_version_value", + "ords_version": "ords_version_value", + }, + "are_primary_allowlisted_ips_used": True, + "lifecycle_details": "lifecycle_details_value", + "state": 1, + "autonomous_container_database_id": "autonomous_container_database_id_value", + "available_upgrade_versions": [ + "available_upgrade_versions_value1", + "available_upgrade_versions_value2", + ], + "connection_strings": { + "all_connection_strings": { + "high": "high_value", + "low": "low_value", + "medium": "medium_value", + }, + "dedicated": "dedicated_value", + "high": "high_value", + "low": "low_value", + "medium": "medium_value", + "profiles": [ + { + "consumer_group": 1, + "display_name": "display_name_value", + "host_format": 1, + "is_regional": True, + "protocol": 1, + "session_mode": 1, + "syntax_format": 1, + "tls_authentication": 1, + "value": "value_value", + } + ], + }, + "connection_urls": { + "apex_uri": "apex_uri_value", + "database_transforms_uri": "database_transforms_uri_value", + "graph_studio_uri": "graph_studio_uri_value", + "machine_learning_notebook_uri": "machine_learning_notebook_uri_value", + "machine_learning_user_management_uri": "machine_learning_user_management_uri_value", + "mongo_db_uri": "mongo_db_uri_value", + "ords_uri": "ords_uri_value", + "sql_dev_web_uri": "sql_dev_web_uri_value", + }, + "failed_data_recovery_duration": {"seconds": 751, "nanos": 543}, + "memory_table_gbs": 1691, + "is_local_data_guard_enabled": True, + "local_adg_auto_failover_max_data_loss_limit": 4513, + "local_standby_db": { + "lag_time_duration": {}, + "lifecycle_details": "lifecycle_details_value", + "state": 1, + "data_guard_role_changed_time": {"seconds": 751, 
"nanos": 543}, + "disaster_recovery_role_changed_time": {}, + }, + "memory_per_oracle_compute_unit_gbs": 3626, + "local_disaster_recovery_type": 1, + "data_safe_state": 1, + "database_management_state": 1, + "open_mode": 1, + "operations_insights_state": 1, + "peer_db_ids": ["peer_db_ids_value1", "peer_db_ids_value2"], + "permission_level": 1, + "private_endpoint": "private_endpoint_value", + "refreshable_mode": 1, + "refreshable_state": 1, + "role": 1, + "scheduled_operation_details": [ + { + "day_of_week": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "stop_time": {}, + } + ], + "sql_web_developer_url": "sql_web_developer_url_value", + "supported_clone_regions": [ + "supported_clone_regions_value1", + "supported_clone_regions_value2", + ], + "used_data_storage_size_tbs": 2752, + "oci_url": "oci_url_value", + "total_auto_backup_storage_size_gbs": 0.36100000000000004, + "next_long_term_backup_time": {}, + "maintenance_begin_time": {}, + "maintenance_end_time": {}, + }, + "labels": {}, + "network": "network_value", + "cidr": "cidr_value", + "create_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = oracledatabase.CreateAutonomousDatabaseRequest.meta.fields[ + "autonomous_database" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["autonomous_database"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["autonomous_database"][field])): + del 
request_init["autonomous_database"][field][i][subfield] + else: + del request_init["autonomous_database"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_autonomous_database(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_autonomous_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_autonomous_database + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_autonomous_database + ] = mock_rpc + + request = {} + client.create_autonomous_database(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_autonomous_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_autonomous_database_rest_required_fields( + request_type=oracledatabase.CreateAutonomousDatabaseRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["autonomous_database_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "autonomousDatabaseId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_autonomous_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "autonomousDatabaseId" in jsonified_request + assert ( + jsonified_request["autonomousDatabaseId"] + == request_init["autonomous_database_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["autonomousDatabaseId"] = "autonomous_database_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_autonomous_database._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "autonomous_database_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "autonomousDatabaseId" in jsonified_request + assert jsonified_request["autonomousDatabaseId"] == "autonomous_database_id_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_autonomous_database(request) + + expected_params = [ + ( + "autonomousDatabaseId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_autonomous_database_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_autonomous_database._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "autonomousDatabaseId", + "requestId", + ) + ) + & set( + ( + "parent", + "autonomousDatabaseId", + "autonomousDatabase", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_autonomous_database_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_create_autonomous_database" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_create_autonomous_database" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.CreateAutonomousDatabaseRequest.pb( + oracledatabase.CreateAutonomousDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = oracledatabase.CreateAutonomousDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_autonomous_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_autonomous_database_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.CreateAutonomousDatabaseRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_autonomous_database(request) + + +def test_create_autonomous_database_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + autonomous_database=gco_autonomous_database.AutonomousDatabase( + name="name_value" + ), + autonomous_database_id="autonomous_database_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_autonomous_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autonomousDatabases" + % client.transport._host, + args[1], + ) + + +def test_create_autonomous_database_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_autonomous_database( + oracledatabase.CreateAutonomousDatabaseRequest(), + parent="parent_value", + autonomous_database=gco_autonomous_database.AutonomousDatabase( + name="name_value" + ), + autonomous_database_id="autonomous_database_id_value", + ) + + +def test_create_autonomous_database_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.DeleteAutonomousDatabaseRequest, + dict, + ], +) +def test_delete_autonomous_database_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_autonomous_database(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_autonomous_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_autonomous_database + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_autonomous_database + ] = mock_rpc + + request = {} + client.delete_autonomous_database(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_autonomous_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_autonomous_database_rest_required_fields( + request_type=oracledatabase.DeleteAutonomousDatabaseRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_autonomous_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_autonomous_database._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_autonomous_database(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_autonomous_database_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_autonomous_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_autonomous_database_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), 
mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_delete_autonomous_database" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_delete_autonomous_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.DeleteAutonomousDatabaseRequest.pb( + oracledatabase.DeleteAutonomousDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = oracledatabase.DeleteAutonomousDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_autonomous_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_autonomous_database_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.DeleteAutonomousDatabaseRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_autonomous_database(request) + + +def test_delete_autonomous_database_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_autonomous_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_autonomous_database_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_autonomous_database( + oracledatabase.DeleteAutonomousDatabaseRequest(), + name="name_value", + ) + + +def test_delete_autonomous_database_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.RestoreAutonomousDatabaseRequest, + dict, + ], +) +def test_restore_autonomous_database_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.restore_autonomous_database(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_restore_autonomous_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.restore_autonomous_database + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.restore_autonomous_database + ] = mock_rpc + + request = {} + client.restore_autonomous_database(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_autonomous_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_restore_autonomous_database_rest_required_fields( + request_type=oracledatabase.RestoreAutonomousDatabaseRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_autonomous_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_autonomous_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.restore_autonomous_database(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_restore_autonomous_database_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.restore_autonomous_database._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "restoreTime", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_restore_autonomous_database_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_restore_autonomous_database" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_restore_autonomous_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.RestoreAutonomousDatabaseRequest.pb( + oracledatabase.RestoreAutonomousDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = oracledatabase.RestoreAutonomousDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.restore_autonomous_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_restore_autonomous_database_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.RestoreAutonomousDatabaseRequest, +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.restore_autonomous_database(request) + + +def test_restore_autonomous_database_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + restore_time=timestamp_pb2.Timestamp(seconds=751), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.restore_autonomous_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}:restore" + % client.transport._host, + args[1], + ) + + +def test_restore_autonomous_database_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.restore_autonomous_database( + oracledatabase.RestoreAutonomousDatabaseRequest(), + name="name_value", + restore_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +def test_restore_autonomous_database_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.GenerateAutonomousDatabaseWalletRequest, + dict, + ], +) +def test_generate_autonomous_database_wallet_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse( + archive_content=b"archive_content_blob", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.generate_autonomous_database_wallet(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, oracledatabase.GenerateAutonomousDatabaseWalletResponse) + assert response.archive_content == b"archive_content_blob" + + +def test_generate_autonomous_database_wallet_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.generate_autonomous_database_wallet + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.generate_autonomous_database_wallet + ] = mock_rpc + + request = {} + client.generate_autonomous_database_wallet(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.generate_autonomous_database_wallet(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_generate_autonomous_database_wallet_rest_required_fields( + request_type=oracledatabase.GenerateAutonomousDatabaseWalletRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request_init["password"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_autonomous_database_wallet._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["password"] = "password_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_autonomous_database_wallet._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "password" in jsonified_request + assert jsonified_request["password"] == "password_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.generate_autonomous_database_wallet(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_generate_autonomous_database_wallet_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.generate_autonomous_database_wallet._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "password", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_autonomous_database_wallet_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = 
OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_generate_autonomous_database_wallet", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "pre_generate_autonomous_database_wallet", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.GenerateAutonomousDatabaseWalletRequest.pb( + oracledatabase.GenerateAutonomousDatabaseWalletRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + oracledatabase.GenerateAutonomousDatabaseWalletResponse.to_json( + oracledatabase.GenerateAutonomousDatabaseWalletResponse() + ) + ) + + request = oracledatabase.GenerateAutonomousDatabaseWalletRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse() + + client.generate_autonomous_database_wallet( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_generate_autonomous_database_wallet_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.GenerateAutonomousDatabaseWalletRequest, +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and 
fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.generate_autonomous_database_wallet(request) + + +def test_generate_autonomous_database_wallet_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + type_=autonomous_database.GenerateType.ALL, + is_regional=True, + password="password_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.generate_autonomous_database_wallet(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}:generateWallet" + % client.transport._host, + args[1], + ) + + +def test_generate_autonomous_database_wallet_rest_flattened_error( + transport: str = "rest", +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_autonomous_database_wallet( + oracledatabase.GenerateAutonomousDatabaseWalletRequest(), + name="name_value", + type_=autonomous_database.GenerateType.ALL, + is_regional=True, + password="password_value", + ) + + +def test_generate_autonomous_database_wallet_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListAutonomousDbVersionsRequest, + dict, + ], +) +def test_list_autonomous_db_versions_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = oracledatabase.ListAutonomousDbVersionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDbVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_autonomous_db_versions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAutonomousDbVersionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_autonomous_db_versions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_autonomous_db_versions + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_autonomous_db_versions + ] = mock_rpc + + request = {} + client.list_autonomous_db_versions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_autonomous_db_versions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_autonomous_db_versions_rest_required_fields( + request_type=oracledatabase.ListAutonomousDbVersionsRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_db_versions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_db_versions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDbVersionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDbVersionsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_autonomous_db_versions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_autonomous_db_versions_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_autonomous_db_versions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_autonomous_db_versions_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_autonomous_db_versions" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_db_versions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListAutonomousDbVersionsRequest.pb( + oracledatabase.ListAutonomousDbVersionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + oracledatabase.ListAutonomousDbVersionsResponse.to_json( + oracledatabase.ListAutonomousDbVersionsResponse() + ) + ) + + request = oracledatabase.ListAutonomousDbVersionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListAutonomousDbVersionsResponse() + + client.list_autonomous_db_versions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_autonomous_db_versions_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListAutonomousDbVersionsRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_autonomous_db_versions(request) + + +def test_list_autonomous_db_versions_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDbVersionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDbVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_autonomous_db_versions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autonomousDbVersions" + % client.transport._host, + args[1], + ) + + +def test_list_autonomous_db_versions_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_autonomous_db_versions( + oracledatabase.ListAutonomousDbVersionsRequest(), + parent="parent_value", + ) + + +def test_list_autonomous_db_versions_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListAutonomousDbVersionsResponse( + autonomous_db_versions=[ + autonomous_db_version.AutonomousDbVersion(), + autonomous_db_version.AutonomousDbVersion(), + autonomous_db_version.AutonomousDbVersion(), + ], + next_page_token="abc", + ), + oracledatabase.ListAutonomousDbVersionsResponse( + autonomous_db_versions=[], + next_page_token="def", + ), + oracledatabase.ListAutonomousDbVersionsResponse( + autonomous_db_versions=[ + autonomous_db_version.AutonomousDbVersion(), + ], + next_page_token="ghi", + ), + oracledatabase.ListAutonomousDbVersionsResponse( + autonomous_db_versions=[ + autonomous_db_version.AutonomousDbVersion(), + autonomous_db_version.AutonomousDbVersion(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListAutonomousDbVersionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_autonomous_db_versions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, autonomous_db_version.AutonomousDbVersion) for i in results + ) + + pages = list(client.list_autonomous_db_versions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + dict, + ], +) +def test_list_autonomous_database_character_sets_rest(request_type): + client = 
OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_autonomous_database_character_sets(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAutonomousDatabaseCharacterSetsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_autonomous_database_character_sets_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_autonomous_database_character_sets + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_autonomous_database_character_sets + ] = mock_rpc + + request = {} + client.list_autonomous_database_character_sets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_autonomous_database_character_sets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_autonomous_database_character_sets_rest_required_fields( + request_type=oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_database_character_sets._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_database_character_sets._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_autonomous_database_character_sets(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_autonomous_database_character_sets_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.list_autonomous_database_character_sets._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_autonomous_database_character_sets_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = 
OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_list_autonomous_database_character_sets", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "pre_list_autonomous_database_character_sets", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest.pb( + oracledatabase.ListAutonomousDatabaseCharacterSetsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.to_json( + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() + ) + ) + + request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() + + client.list_autonomous_database_character_sets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_autonomous_database_character_sets_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method 
and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_autonomous_database_character_sets(request) + + +def test_list_autonomous_database_character_sets_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_autonomous_database_character_sets(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autonomousDatabaseCharacterSets" + % client.transport._host, + args[1], + ) + + +def test_list_autonomous_database_character_sets_rest_flattened_error( + transport: str = "rest", +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_autonomous_database_character_sets( + oracledatabase.ListAutonomousDatabaseCharacterSetsRequest(), + parent="parent_value", + ) + + +def test_list_autonomous_database_character_sets_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( + autonomous_database_character_sets=[ + autonomous_database_character_set.AutonomousDatabaseCharacterSet(), + autonomous_database_character_set.AutonomousDatabaseCharacterSet(), + autonomous_database_character_set.AutonomousDatabaseCharacterSet(), + ], + next_page_token="abc", + ), + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( + autonomous_database_character_sets=[], + next_page_token="def", + ), + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( + autonomous_database_character_sets=[ + autonomous_database_character_set.AutonomousDatabaseCharacterSet(), + ], + next_page_token="ghi", + ), + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( + autonomous_database_character_sets=[ + autonomous_database_character_set.AutonomousDatabaseCharacterSet(), + autonomous_database_character_set.AutonomousDatabaseCharacterSet(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_autonomous_database_character_sets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance( + i, autonomous_database_character_set.AutonomousDatabaseCharacterSet + ) + for i in results + ) + + pages = list( + client.list_autonomous_database_character_sets(request=sample_request).pages + ) + for page_, token in zip(pages, 
["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListAutonomousDatabaseBackupsRequest, + dict, + ], +) +def test_list_autonomous_database_backups_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_autonomous_database_backups(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAutonomousDatabaseBackupsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_autonomous_database_backups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_autonomous_database_backups + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_autonomous_database_backups + ] = mock_rpc + + request = {} + client.list_autonomous_database_backups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_autonomous_database_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_autonomous_database_backups_rest_required_fields( + request_type=oracledatabase.ListAutonomousDatabaseBackupsRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_database_backups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_database_backups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_autonomous_database_backups(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_autonomous_database_backups_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.list_autonomous_database_backups._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_autonomous_database_backups_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with 
mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_list_autonomous_database_backups", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_database_backups" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListAutonomousDatabaseBackupsRequest.pb( + oracledatabase.ListAutonomousDatabaseBackupsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + oracledatabase.ListAutonomousDatabaseBackupsResponse.to_json( + oracledatabase.ListAutonomousDatabaseBackupsResponse() + ) + ) + + request = oracledatabase.ListAutonomousDatabaseBackupsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse() + + client.list_autonomous_database_backups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_autonomous_database_backups_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.ListAutonomousDatabaseBackupsRequest, +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_autonomous_database_backups(request) + + +def test_list_autonomous_database_backups_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_autonomous_database_backups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
def test_list_autonomous_database_backups_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened arguments is rejected."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    with pytest.raises(ValueError):
        client.list_autonomous_database_backups(
            oracledatabase.ListAutonomousDatabaseBackupsRequest(),
            parent="parent_value",
        )


def test_list_autonomous_database_backups_rest_pager(transport: str = "rest"):
    """The REST pager walks every page and exposes each page token."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    with mock.patch.object(Session, "request") as req:
        backup = autonomous_db_backup.AutonomousDatabaseBackup
        # Four pages: 3 + 0 + 1 + 2 items, tokens "abc"/"def"/"ghi"/"".
        pages = (
            oracledatabase.ListAutonomousDatabaseBackupsResponse(
                autonomous_database_backups=[backup(), backup(), backup()],
                next_page_token="abc",
            ),
            oracledatabase.ListAutonomousDatabaseBackupsResponse(
                autonomous_database_backups=[],
                next_page_token="def",
            ),
            oracledatabase.ListAutonomousDatabaseBackupsResponse(
                autonomous_database_backups=[backup()],
                next_page_token="ghi",
            ),
            oracledatabase.ListAutonomousDatabaseBackupsResponse(
                autonomous_database_backups=[backup(), backup()],
            ),
        )
        # The method is invoked twice below, so queue the pages twice over.
        pages = pages + pages

        side_effects = []
        for page in pages:
            fake = Response()
            fake.status_code = 200
            fake._content = (
                oracledatabase.ListAutonomousDatabaseBackupsResponse.to_json(page)
            ).encode("UTF-8")
            side_effects.append(fake)
        req.side_effect = side_effects

        sample_request = {"parent": "projects/sample1/locations/sample2"}

        pager = client.list_autonomous_database_backups(request=sample_request)
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(item, backup) for item in results)

        page_objects = list(
            client.list_autonomous_database_backups(request=sample_request).pages
        )
        for page_, token in zip(page_objects, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
def test_credentials_transport_error():
    """A transport instance is mutually exclusive with credentials, a
    credentials file, an API key, and scopes; each combination raises
    ``ValueError`` (api_key + credentials is likewise rejected)."""
    anon = ga_credentials.AnonymousCredentials

    # Transport instance + credentials object.
    with pytest.raises(ValueError):
        OracleDatabaseClient(
            credentials=anon(),
            transport=transports.OracleDatabaseRestTransport(credentials=anon()),
        )

    # Transport instance + credentials file.
    with pytest.raises(ValueError):
        OracleDatabaseClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transports.OracleDatabaseRestTransport(credentials=anon()),
        )

    # Transport instance + api_key.
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        OracleDatabaseClient(
            client_options=options,
            transport=transports.OracleDatabaseRestTransport(credentials=anon()),
        )

    # api_key + credentials object (no transport involved).
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        OracleDatabaseClient(client_options=options, credentials=anon())

    # Transport instance + scopes.
    with pytest.raises(ValueError):
        OracleDatabaseClient(
            client_options={"scopes": ["1", "2"]},
            transport=transports.OracleDatabaseRestTransport(credentials=anon()),
        )
def test_transport_instance():
    """A pre-built transport instance is adopted verbatim by the client."""
    rest_transport = transports.OracleDatabaseRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = OracleDatabaseClient(transport=rest_transport)
    assert client.transport is rest_transport


@pytest.mark.parametrize(
    "transport_class",
    [
        transports.OracleDatabaseRestTransport,
    ],
)
def test_transport_adc(transport_class):
    """When no credentials are supplied, transports fall back to ADC."""
    with mock.patch.object(google.auth, "default") as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()


@pytest.mark.parametrize(
    "transport_name",
    [
        "rest",
    ],
)
def test_transport_kind(transport_name):
    """``transport.kind`` mirrors the name used to select the transport."""
    transport = OracleDatabaseClient.get_transport_class(transport_name)(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert transport.kind == transport_name


def test_oracle_database_base_transport_error():
    """Credentials object plus credentials file raises DuplicateCredentialArgs."""
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transports.OracleDatabaseTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )
def test_oracle_database_base_transport():
    """Every RPC, plus close() and the LRO/kind properties, is abstract on
    the base transport and raises ``NotImplementedError``."""
    with mock.patch(
        "google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport.__init__"
    ) as Transport:
        Transport.return_value = None
        transport = transports.OracleDatabaseTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    methods = (
        "list_cloud_exadata_infrastructures",
        "get_cloud_exadata_infrastructure",
        "create_cloud_exadata_infrastructure",
        "delete_cloud_exadata_infrastructure",
        "list_cloud_vm_clusters",
        "get_cloud_vm_cluster",
        "create_cloud_vm_cluster",
        "delete_cloud_vm_cluster",
        "list_entitlements",
        "list_db_servers",
        "list_db_nodes",
        "list_gi_versions",
        "list_db_system_shapes",
        "list_autonomous_databases",
        "get_autonomous_database",
        "create_autonomous_database",
        "delete_autonomous_database",
        "restore_autonomous_database",
        "generate_autonomous_database_wallet",
        "list_autonomous_db_versions",
        "list_autonomous_database_character_sets",
        "list_autonomous_database_backups",
        "get_location",
        "list_locations",
        "get_operation",
        "cancel_operation",
        "delete_operation",
        "list_operations",
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

    with pytest.raises(NotImplementedError):
        transport.close()

    # The LRO client property must also be abstract.
    with pytest.raises(NotImplementedError):
        transport.operations_client

    # Catch-all for the remaining abstract properties.
    for prop in ("kind",):
        with pytest.raises(NotImplementedError):
            getattr(transport, prop)()


def test_oracle_database_base_transport_with_credentials_file():
    """A credentials file is loaded with the cloud-platform default scope."""
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch(
        "google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.OracleDatabaseTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id="octopus",
        )
def test_oracle_database_base_transport_with_adc():
    """With neither credentials nor a file, the base transport uses ADC."""
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
        "google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.OracleDatabaseTransport()
        adc.assert_called_once()


def test_oracle_database_auth_adc():
    """The client requests ADC with the cloud-platform default scope."""
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        OracleDatabaseClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id=None,
        )


def test_oracle_database_http_transport_client_cert_source_for_mtls():
    """A client-cert source is wired into the authorized session for mTLS."""
    with mock.patch(
        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
    ) as mock_configure_mtls_channel:
        transports.OracleDatabaseRestTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            client_cert_source_for_mtls=client_cert_source_callback,
        )
    mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)


def test_oracle_database_rest_lro_client():
    """The REST transport lazily builds and caches one operations client."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    transport = client.transport

    lro_client = transport.operations_client
    assert isinstance(lro_client, operations_v1.AbstractOperationsClient)
    # The property must hand back the very same cached object every time.
    assert transport.operations_client is lro_client
@pytest.mark.parametrize(
    "transport_name",
    [
        "rest",
    ],
)
def test_oracle_database_host_no_port(transport_name):
    """An endpoint without a port resolves to the bare https host for REST
    (gRPC variants would get the :443 default)."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(
            api_endpoint="oracledatabase.googleapis.com"
        ),
        transport=transport_name,
    )
    if transport_name in ["grpc", "grpc_asyncio"]:
        assert client.transport._host == "oracledatabase.googleapis.com:443"
    else:
        assert client.transport._host == "https://oracledatabase.googleapis.com"


@pytest.mark.parametrize(
    "transport_name",
    [
        "rest",
    ],
)
def test_oracle_database_host_with_port(transport_name):
    """An explicit port in the endpoint is preserved on the transport host."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(
            api_endpoint="oracledatabase.googleapis.com:8000"
        ),
        transport=transport_name,
    )
    if transport_name in ["grpc", "grpc_asyncio"]:
        assert client.transport._host == "oracledatabase.googleapis.com:8000"
    else:
        assert client.transport._host == "https://oracledatabase.googleapis.com:8000"


@pytest.mark.parametrize(
    "transport_name",
    [
        "rest",
    ],
)
def test_oracle_database_client_transport_session_collision(transport_name):
    """Two independently-created clients never share a per-method session."""
    client1 = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )
    client2 = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )
    rpc_names = (
        "list_cloud_exadata_infrastructures",
        "get_cloud_exadata_infrastructure",
        "create_cloud_exadata_infrastructure",
        "delete_cloud_exadata_infrastructure",
        "list_cloud_vm_clusters",
        "get_cloud_vm_cluster",
        "create_cloud_vm_cluster",
        "delete_cloud_vm_cluster",
        "list_entitlements",
        "list_db_servers",
        "list_db_nodes",
        "list_gi_versions",
        "list_db_system_shapes",
        "list_autonomous_databases",
        "get_autonomous_database",
        "create_autonomous_database",
        "delete_autonomous_database",
        "restore_autonomous_database",
        "generate_autonomous_database_wallet",
        "list_autonomous_db_versions",
        "list_autonomous_database_character_sets",
        "list_autonomous_database_backups",
    )
    for rpc_name in rpc_names:
        stub1 = getattr(client1.transport, rpc_name)
        stub2 = getattr(client2.transport, rpc_name)
        assert stub1._session != stub2._session


def test_autonomous_database_path():
    """The path builder renders the documented resource-name template."""
    expected = "projects/squid/locations/clam/autonomousDatabases/whelk"
    assert (
        OracleDatabaseClient.autonomous_database_path("squid", "clam", "whelk")
        == expected
    )


def test_parse_autonomous_database_path():
    """Parsing inverts the path builder."""
    fields = {
        "project": "octopus",
        "location": "oyster",
        "autonomous_database": "nudibranch",
    }
    path = OracleDatabaseClient.autonomous_database_path(**fields)
    assert OracleDatabaseClient.parse_autonomous_database_path(path) == fields
def test_autonomous_database_backup_path():
    """The path builder renders the documented resource-name template."""
    expected = "projects/cuttlefish/locations/mussel/autonomousDatabaseBackups/winkle"
    assert (
        OracleDatabaseClient.autonomous_database_backup_path(
            "cuttlefish", "mussel", "winkle"
        )
        == expected
    )


def test_parse_autonomous_database_backup_path():
    """Parsing inverts the path builder."""
    fields = {
        "project": "nautilus",
        "location": "scallop",
        "autonomous_database_backup": "abalone",
    }
    path = OracleDatabaseClient.autonomous_database_backup_path(**fields)
    assert OracleDatabaseClient.parse_autonomous_database_backup_path(path) == fields


def test_autonomous_database_character_set_path():
    """The path builder renders the documented resource-name template."""
    expected = "projects/squid/locations/clam/autonomousDatabaseCharacterSets/whelk"
    assert (
        OracleDatabaseClient.autonomous_database_character_set_path(
            "squid", "clam", "whelk"
        )
        == expected
    )


def test_parse_autonomous_database_character_set_path():
    """Parsing inverts the path builder."""
    fields = {
        "project": "octopus",
        "location": "oyster",
        "autonomous_database_character_set": "nudibranch",
    }
    path = OracleDatabaseClient.autonomous_database_character_set_path(**fields)
    assert (
        OracleDatabaseClient.parse_autonomous_database_character_set_path(path)
        == fields
    )
def test_autonomous_db_version_path():
    """The path builder renders the documented resource-name template."""
    expected = "projects/cuttlefish/locations/mussel/autonomousDbVersions/winkle"
    assert (
        OracleDatabaseClient.autonomous_db_version_path(
            "cuttlefish", "mussel", "winkle"
        )
        == expected
    )


def test_parse_autonomous_db_version_path():
    """Parsing inverts the path builder."""
    fields = {
        "project": "nautilus",
        "location": "scallop",
        "autonomous_db_version": "abalone",
    }
    path = OracleDatabaseClient.autonomous_db_version_path(**fields)
    assert OracleDatabaseClient.parse_autonomous_db_version_path(path) == fields


def test_cloud_exadata_infrastructure_path():
    """The path builder renders the documented resource-name template."""
    expected = "projects/squid/locations/clam/cloudExadataInfrastructures/whelk"
    assert (
        OracleDatabaseClient.cloud_exadata_infrastructure_path(
            "squid", "clam", "whelk"
        )
        == expected
    )


def test_parse_cloud_exadata_infrastructure_path():
    """Parsing inverts the path builder."""
    fields = {
        "project": "octopus",
        "location": "oyster",
        "cloud_exadata_infrastructure": "nudibranch",
    }
    path = OracleDatabaseClient.cloud_exadata_infrastructure_path(**fields)
    assert OracleDatabaseClient.parse_cloud_exadata_infrastructure_path(path) == fields
def test_cloud_vm_cluster_path():
    """The path builder renders the documented resource-name template."""
    expected = "projects/cuttlefish/locations/mussel/cloudVmClusters/winkle"
    assert (
        OracleDatabaseClient.cloud_vm_cluster_path("cuttlefish", "mussel", "winkle")
        == expected
    )


def test_parse_cloud_vm_cluster_path():
    """Parsing inverts the path builder."""
    fields = {
        "project": "nautilus",
        "location": "scallop",
        "cloud_vm_cluster": "abalone",
    }
    path = OracleDatabaseClient.cloud_vm_cluster_path(**fields)
    assert OracleDatabaseClient.parse_cloud_vm_cluster_path(path) == fields


def test_db_node_path():
    """The path builder renders the nested dbNodes resource-name template."""
    expected = "projects/squid/locations/clam/cloudVmClusters/whelk/dbNodes/octopus"
    assert (
        OracleDatabaseClient.db_node_path("squid", "clam", "whelk", "octopus")
        == expected
    )


def test_parse_db_node_path():
    """Parsing inverts the path builder."""
    fields = {
        "project": "oyster",
        "location": "nudibranch",
        "cloud_vm_cluster": "cuttlefish",
        "db_node": "mussel",
    }
    path = OracleDatabaseClient.db_node_path(**fields)
    assert OracleDatabaseClient.parse_db_node_path(path) == fields
def test_db_server_path():
    """The path builder renders the nested dbServers resource-name template."""
    expected = (
        "projects/winkle/locations/nautilus/cloudExadataInfrastructures/scallop"
        "/dbServers/abalone"
    )
    assert (
        OracleDatabaseClient.db_server_path("winkle", "nautilus", "scallop", "abalone")
        == expected
    )


def test_parse_db_server_path():
    """Parsing inverts the path builder."""
    fields = {
        "project": "squid",
        "location": "clam",
        "cloud_exadata_infrastructure": "whelk",
        "db_server": "octopus",
    }
    path = OracleDatabaseClient.db_server_path(**fields)
    assert OracleDatabaseClient.parse_db_server_path(path) == fields


def test_db_system_shape_path():
    """The path builder renders the documented resource-name template."""
    expected = "projects/oyster/locations/nudibranch/dbSystemShapes/cuttlefish"
    assert (
        OracleDatabaseClient.db_system_shape_path("oyster", "nudibranch", "cuttlefish")
        == expected
    )


def test_parse_db_system_shape_path():
    """Parsing inverts the path builder."""
    fields = {
        "project": "mussel",
        "location": "winkle",
        "db_system_shape": "nautilus",
    }
    path = OracleDatabaseClient.db_system_shape_path(**fields)
    assert OracleDatabaseClient.parse_db_system_shape_path(path) == fields
def test_entitlement_path():
    """The path builder renders the documented resource-name template."""
    expected = "projects/scallop/locations/abalone/entitlements/squid"
    assert (
        OracleDatabaseClient.entitlement_path("scallop", "abalone", "squid")
        == expected
    )


def test_parse_entitlement_path():
    """Parsing inverts the path builder."""
    fields = {
        "project": "clam",
        "location": "whelk",
        "entitlement": "octopus",
    }
    path = OracleDatabaseClient.entitlement_path(**fields)
    assert OracleDatabaseClient.parse_entitlement_path(path) == fields


def test_gi_version_path():
    """The path builder renders the documented resource-name template."""
    expected = "projects/oyster/locations/nudibranch/giVersions/cuttlefish"
    assert (
        OracleDatabaseClient.gi_version_path("oyster", "nudibranch", "cuttlefish")
        == expected
    )


def test_parse_gi_version_path():
    """Parsing inverts the path builder."""
    fields = {
        "project": "mussel",
        "location": "winkle",
        "gi_version": "nautilus",
    }
    path = OracleDatabaseClient.gi_version_path(**fields)
    assert OracleDatabaseClient.parse_gi_version_path(path) == fields


def test_network_path():
    """The path builder renders the global-network resource-name template."""
    expected = "projects/scallop/global/networks/abalone"
    assert OracleDatabaseClient.network_path("scallop", "abalone") == expected


def test_parse_network_path():
    """Parsing inverts the path builder."""
    fields = {
        "project": "squid",
        "network": "clam",
    }
    path = OracleDatabaseClient.network_path(**fields)
    assert OracleDatabaseClient.parse_network_path(path) == fields
def test_common_billing_account_path():
    """The common billing-account path helper renders its template."""
    expected = "billingAccounts/whelk"
    assert OracleDatabaseClient.common_billing_account_path("whelk") == expected


def test_parse_common_billing_account_path():
    """Parsing inverts the path builder."""
    fields = {
        "billing_account": "octopus",
    }
    path = OracleDatabaseClient.common_billing_account_path(**fields)
    assert OracleDatabaseClient.parse_common_billing_account_path(path) == fields


def test_common_folder_path():
    """The common folder path helper renders its template."""
    expected = "folders/oyster"
    assert OracleDatabaseClient.common_folder_path("oyster") == expected


def test_parse_common_folder_path():
    """Parsing inverts the path builder."""
    fields = {
        "folder": "nudibranch",
    }
    path = OracleDatabaseClient.common_folder_path(**fields)
    assert OracleDatabaseClient.parse_common_folder_path(path) == fields


def test_common_organization_path():
    """The common organization path helper renders its template."""
    expected = "organizations/cuttlefish"
    assert OracleDatabaseClient.common_organization_path("cuttlefish") == expected


def test_parse_common_organization_path():
    """Parsing inverts the path builder."""
    fields = {
        "organization": "mussel",
    }
    path = OracleDatabaseClient.common_organization_path(**fields)
    assert OracleDatabaseClient.parse_common_organization_path(path) == fields
def test_common_project_path():
    """The common project path helper renders its template."""
    expected = "projects/winkle"
    assert OracleDatabaseClient.common_project_path("winkle") == expected


def test_parse_common_project_path():
    """Parsing inverts the path builder."""
    fields = {
        "project": "nautilus",
    }
    path = OracleDatabaseClient.common_project_path(**fields)
    assert OracleDatabaseClient.parse_common_project_path(path) == fields


def test_common_location_path():
    """The common location path helper renders its template."""
    expected = "projects/scallop/locations/abalone"
    assert OracleDatabaseClient.common_location_path("scallop", "abalone") == expected


def test_parse_common_location_path():
    """Parsing inverts the path builder."""
    fields = {
        "project": "squid",
        "location": "clam",
    }
    path = OracleDatabaseClient.common_location_path(**fields)
    assert OracleDatabaseClient.parse_common_location_path(path) == fields
def test_client_with_default_client_info():
    """The provided client_info reaches ``_prep_wrapped_messages`` whether the
    transport is built via the client or instantiated directly."""
    client_info = gapic_v1.client_info.ClientInfo()

    # Built through the client constructor.
    with mock.patch.object(
        transports.OracleDatabaseTransport, "_prep_wrapped_messages"
    ) as prep:
        OracleDatabaseClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    # Built by instantiating the default transport class directly.
    with mock.patch.object(
        transports.OracleDatabaseTransport, "_prep_wrapped_messages"
    ) as prep:
        transport_class = OracleDatabaseClient.get_transport_class()
        transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)


def test_get_location_rest_bad_request(
    transport: str = "rest", request_type=locations_pb2.GetLocationRequest
):
    """An HTTP 400 from GetLocation surfaces as core_exceptions.BadRequest."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = json_format.ParseDict(
        {"name": "projects/sample1/locations/sample2"}, request_type()
    )

    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.get_location(request)
@pytest.mark.parametrize(
    "request_type",
    [
        locations_pb2.GetLocationRequest,
        dict,
    ],
)
def test_get_location_rest(request_type):
    """A successful GetLocation call yields a Location message."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**{"name": "projects/sample1/locations/sample2"})

    with mock.patch.object(type(client.transport._session), "request") as req:
        ok_response = Response()
        ok_response.status_code = 200
        ok_response._content = json_format.MessageToJson(
            locations_pb2.Location()
        ).encode("UTF-8")
        req.return_value = ok_response

        response = client.get_location(request)

    assert isinstance(response, locations_pb2.Location)


def test_list_locations_rest_bad_request(
    transport: str = "rest", request_type=locations_pb2.ListLocationsRequest
):
    """An HTTP 400 from ListLocations surfaces as core_exceptions.BadRequest."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = json_format.ParseDict({"name": "projects/sample1"}, request_type())

    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.list_locations(request)
@pytest.mark.parametrize(
    "request_type",
    [
        locations_pb2.ListLocationsRequest,
        dict,
    ],
)
def test_list_locations_rest(request_type):
    """A successful ListLocations call yields a ListLocationsResponse."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**{"name": "projects/sample1"})

    with mock.patch.object(type(client.transport._session), "request") as req:
        ok_response = Response()
        ok_response.status_code = 200
        ok_response._content = json_format.MessageToJson(
            locations_pb2.ListLocationsResponse()
        ).encode("UTF-8")
        req.return_value = ok_response

        response = client.list_locations(request)

    assert isinstance(response, locations_pb2.ListLocationsResponse)


def test_cancel_operation_rest_bad_request(
    transport: str = "rest", request_type=operations_pb2.CancelOperationRequest
):
    """An HTTP 400 from CancelOperation surfaces as core_exceptions.BadRequest."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = json_format.ParseDict(
        {"name": "projects/sample1/locations/sample2/operations/sample3"},
        request_type(),
    )

    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.cancel_operation(request)
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From 0d350038411bbdcf10eb7fb6820084abcb362c5a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 Sep 2024 13:17:47 -0400 Subject: [PATCH 36/59] feat: [google-cloud-dataproc] add support for Spark Connect sessions in Dataproc Serverless for Spark (#13106) BEGIN_COMMIT_OVERRIDE feat: add support for Spark Connect sessions in Dataproc Serverless for Spark docs: update docs for `filter` field in `ListSessionsRequest` END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
docs: update docs for `filter` field in `ListSessionsRequest` PiperOrigin-RevId: 678438691 Source-Link: https://github.com/googleapis/googleapis/commit/5c181aaf78bd1ae2e08c3a2971cd9e87b6e00986 Source-Link: https://github.com/googleapis/googleapis-gen/commit/3b37678e3ed4e52f2a122ea91738a9e7b5f4cba1 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFwcm9jLy5Pd2xCb3QueWFtbCIsImgiOiIzYjM3Njc4ZTNlZDRlNTJmMmExMjJlYTkxNzM4YTllN2I1ZjRjYmExIn0= --------- Co-authored-by: Owl Bot --- .../google/cloud/dataproc/__init__.py | 2 ++ .../google/cloud/dataproc/gapic_version.py | 2 +- .../google/cloud/dataproc_v1/__init__.py | 2 ++ .../google/cloud/dataproc_v1/gapic_version.py | 2 +- .../cloud/dataproc_v1/types/__init__.py | 2 ++ .../dataproc_v1/types/session_templates.py | 15 +++++++++++ .../cloud/dataproc_v1/types/sessions.py | 27 +++++++++++++++++-- ...pet_metadata_google.cloud.dataproc.v1.json | 2 +- .../dataproc_v1/test_session_controller.py | 1 + .../test_session_template_controller.py | 2 ++ 10 files changed, 52 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py index 1c45dca78fda..5df4195f42c4 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py @@ -189,6 +189,7 @@ ListSessionsRequest, ListSessionsResponse, Session, + SparkConnectConfig, TerminateSessionRequest, ) from google.cloud.dataproc_v1.types.shared import ( @@ -362,6 +363,7 @@ "ListSessionsRequest", "ListSessionsResponse", "Session", + "SparkConnectConfig", "TerminateSessionRequest", "AutotuningConfig", "EnvironmentConfig", diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py index 435e79ea7a30..558c8aab67c5 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py +++ 
b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "5.12.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py index e89772784679..1a6bbd78319e 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py @@ -169,6 +169,7 @@ ListSessionsRequest, ListSessionsResponse, Session, + SparkConnectConfig, TerminateSessionRequest, ) from .types.shared import ( @@ -353,6 +354,7 @@ "ShieldedInstanceConfig", "SoftwareConfig", "SparkBatch", + "SparkConnectConfig", "SparkHistoryServerConfig", "SparkJob", "SparkRBatch", diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py index 435e79ea7a30..558c8aab67c5 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "5.12.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py index 2bf4fcd11209..62dce7408efa 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py @@ -138,6 +138,7 @@ ListSessionsRequest, ListSessionsResponse, Session, + SparkConnectConfig, TerminateSessionRequest, ) from .shared import ( @@ -295,6 +296,7 @@ "ListSessionsRequest", "ListSessionsResponse", "Session", + "SparkConnectConfig", "TerminateSessionRequest", "AutotuningConfig", "EnvironmentConfig", diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/session_templates.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/session_templates.py index 60c792b58b5b..66125b2cb461 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/session_templates.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/session_templates.py @@ -175,6 +175,11 @@ class DeleteSessionTemplateRequest(proto.Message): class SessionTemplate(proto.Message): r"""A representation of a session template. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -189,6 +194,10 @@ class SessionTemplate(proto.Message): jupyter_session (google.cloud.dataproc_v1.types.JupyterConfig): Optional. Jupyter session config. + This field is a member of `oneof`_ ``session_config``. + spark_connect_session (google.cloud.dataproc_v1.types.SparkConnectConfig): + Optional. 
Spark Connect session config. + This field is a member of `oneof`_ ``session_config``. creator (str): Output only. The email address of the user @@ -236,6 +245,12 @@ class SessionTemplate(proto.Message): oneof="session_config", message=sessions.JupyterConfig, ) + spark_connect_session: sessions.SparkConnectConfig = proto.Field( + proto.MESSAGE, + number=11, + oneof="session_config", + message=sessions.SparkConnectConfig, + ) creator: str = proto.Field( proto.STRING, number=5, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/sessions.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/sessions.py index 2a5b2cc5d248..1ab37c9ec32c 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/sessions.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/sessions.py @@ -33,6 +33,7 @@ "DeleteSessionRequest", "Session", "JupyterConfig", + "SparkConnectConfig", }, ) @@ -125,13 +126,16 @@ class ListSessionsRequest(proto.Message): various fields in each session resource. Filters are case sensitive, and may contain multiple clauses combined with logical operators (AND, OR). Supported fields are - ``session_id``, ``session_uuid``, ``state``, and - ``create_time``. + ``session_id``, ``session_uuid``, ``state``, + ``create_time``, and ``labels``. Example: ``state = ACTIVE and create_time < "2023-01-01T00:00:00Z"`` is a filter for sessions in an ACTIVE state that were created before 2023-01-01. + ``state = ACTIVE and labels.environment=production`` is a + filter for sessions in an ACTIVE state that have a + production environment label. See https://google.aip.dev/assets/misc/ebnf-filtering.txt for a detailed description of the filter syntax and a list @@ -249,6 +253,11 @@ class DeleteSessionRequest(proto.Message): class Session(proto.Message): r"""A representation of a session. + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -264,6 +273,10 @@ class Session(proto.Message): jupyter_session (google.cloud.dataproc_v1.types.JupyterConfig): Optional. Jupyter session config. + This field is a member of `oneof`_ ``session_config``. + spark_connect_session (google.cloud.dataproc_v1.types.SparkConnectConfig): + Optional. Spark Connect session config. + This field is a member of `oneof`_ ``session_config``. runtime_info (google.cloud.dataproc_v1.types.RuntimeInfo): Output only. Runtime information about @@ -388,6 +401,12 @@ class SessionStateHistory(proto.Message): oneof="session_config", message="JupyterConfig", ) + spark_connect_session: "SparkConnectConfig" = proto.Field( + proto.MESSAGE, + number=17, + oneof="session_config", + message="SparkConnectConfig", + ) runtime_info: shared.RuntimeInfo = proto.Field( proto.MESSAGE, number=6, @@ -478,4 +497,8 @@ class Kernel(proto.Enum): ) +class SparkConnectConfig(proto.Message): + r"""Spark Connect configuration for an interactive session.""" + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json index a44d5d6db9b3..c5f4e003db04 100644 --- a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json +++ b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc", - "version": "5.12.0" + "version": "0.1.0" }, "snippets": [ { diff --git 
a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py index f5dbfb565af8..7cc868611e6a 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py @@ -3293,6 +3293,7 @@ def test_create_session_rest(request_type): "uuid": "uuid_value", "create_time": {"seconds": 751, "nanos": 543}, "jupyter_session": {"kernel": 1, "display_name": "display_name_value"}, + "spark_connect_session": {}, "runtime_info": { "endpoints": {}, "output_uri": "output_uri_value", diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py index b157306093fc..0da9f81f5ffe 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py @@ -3412,6 +3412,7 @@ def test_create_session_template_rest(request_type): "description": "description_value", "create_time": {"seconds": 751, "nanos": 543}, "jupyter_session": {"kernel": 1, "display_name": "display_name_value"}, + "spark_connect_session": {}, "creator": "creator_value", "labels": {}, "runtime_config": { @@ -3850,6 +3851,7 @@ def test_update_session_template_rest(request_type): "description": "description_value", "create_time": {"seconds": 751, "nanos": 543}, "jupyter_session": {"kernel": 1, "display_name": "display_name_value"}, + "spark_connect_session": {}, "creator": "creator_value", "labels": {}, "runtime_config": { From 89e859b4741d8d4eca7065eb095a9c3ce873d733 Mon Sep 17 00:00:00 2001 From: yoshi-code-bot <70984784+yoshi-code-bot@users.noreply.github.com> Date: Mon, 30 Sep 2024 11:22:38 -0700 Subject: 
[PATCH 37/59] chore: Update release-please config files (#13103) Update release-please config files --- .release-please-manifest.json | 1 + release-please-config.json | 15 +++++++++++++++ 2 files changed, 16 insertions(+) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 9b7f01b6663b..37cf389b81dc 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -117,6 +117,7 @@ "packages/google-cloud-network-services": "0.5.14", "packages/google-cloud-notebooks": "1.10.5", "packages/google-cloud-optimization": "1.8.5", + "packages/google-cloud-oracledatabase": "0.0.0", "packages/google-cloud-orchestration-airflow": "1.14.0", "packages/google-cloud-os-config": "1.17.5", "packages/google-cloud-os-login": "2.14.6", diff --git a/release-please-config.json b/release-please-config.json index ca70cf6baec5..7866aa34f750 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -2028,6 +2028,21 @@ ], "release-type": "python" }, + "packages/google-cloud-oracledatabase": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-cloud-oracledatabase", + "extra-files": [ + "google/cloud/oracledatabase/gapic_version.py", + "google/cloud/oracledatabase_v1/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json", + "type": "json" + } + ], + "release-type": "python" + }, "packages/google-cloud-orchestration-airflow": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, From 32b254c110626aff2194aceb93f131f745cfcf29 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 Sep 2024 14:43:55 -0400 Subject: [PATCH 38/59] feat: [google-maps-routeoptimization] Add support for generating route tokens (#13105) BEGIN_COMMIT_OVERRIDE feat: Add support for generating route tokens feat: A new field `route_token` is 
added to message `.google.maps.routeoptimization.v1.ShipmentRoute.Transition` docs: A comment for method `BatchOptimizeTours` in service `RouteOptimization` is changed docs: A comment for field `populate_transition_polylines` in message `.google.maps.routeoptimization.v1.OptimizeToursRequest` is changed docs: A comment for field `code` in message `.google.maps.routeoptimization.v1.OptimizeToursValidationError` is changed END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. feat: A new field `route_token` is added to message `.google.maps.routeoptimization.v1.ShipmentRoute.Transition` docs: A comment for method `BatchOptimizeTours` in service `RouteOptimization` is changed docs: A comment for field `populate_transition_polylines` in message `.google.maps.routeoptimization.v1.OptimizeToursRequest` is changed docs: A comment for field `code` in message `.google.maps.routeoptimization.v1.OptimizeToursValidationError` is changed PiperOrigin-RevId: 677952232 Source-Link: https://github.com/googleapis/googleapis/commit/534e49c0ca0b9297f4ede6f119a0db054b35dd1e Source-Link: https://github.com/googleapis/googleapis-gen/commit/da6e35d31b0de9ddbaa97bd964899fbb9b1c000c Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcHMtcm91dGVvcHRpbWl6YXRpb24vLk93bEJvdC55YW1sIiwiaCI6ImRhNmUzNWQzMWIwZGU5ZGRiYWE5N2JkOTY0ODk5ZmJiOWIxYzAwMGMifQ== --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../maps/routeoptimization/gapic_version.py | 2 +- .../routeoptimization_v1/gapic_version.py | 2 +- .../route_optimization/async_client.py | 21 +- .../services/route_optimization/client.py | 21 +- .../route_optimization/transports/grpc.py | 21 +- .../transports/grpc_asyncio.py | 21 +- .../types/route_optimization_service.py | 372 ++---------------- ...data_google.maps.routeoptimization.v1.json | 2 +- 8 files changed, 70 insertions(+), 392 deletions(-) diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py 
b/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py index 114e40645800..558c8aab67c5 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py index 114e40645800..558c8aab67c5 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py index 3c899f1f772a..73dc5e7358cc 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py @@ -403,25 +403,26 @@ async def batch_optimize_tours( This method is a Long Running Operation (LRO). 
The inputs for optimization (``OptimizeToursRequest`` messages) and outputs - (``OptimizeToursResponse`` messages) are read/written from/to - Cloud Storage in user-specified format. Like the + (``OptimizeToursResponse`` messages) are read from and written + to Cloud Storage in user-specified format. Like the ``OptimizeTours`` method, each ``OptimizeToursRequest`` contains a ``ShipmentModel`` and returns an ``OptimizeToursResponse`` - containing ``ShipmentRoute``\ s, which are a set of routes to be - performed by vehicles minimizing the overall cost. + containing ``ShipmentRoute`` fields, which are a set of routes + to be performed by vehicles minimizing the overall cost. The user can poll ``operations.get`` to check the status of the LRO: - If the LRO's ``done`` field is false, then at least one request - is still being processed. Other requests may have completed - successfully and their results are available in GCS. + If the LRO ``done`` field is false, then at least one request is + still being processed. Other requests may have completed + successfully and their results are available in Cloud Storage. If the LRO's ``done`` field is true, then all requests have been processed. Any successfully processed requests will have their - results available in GCS. Any requests that failed will not have - their results available in GCS. If the LRO's ``error`` field is - set, then it contains the error from one of the failed requests. + results available in Cloud Storage. Any requests that failed + will not have their results available in Cloud Storage. If the + LRO's ``error`` field is set, then it contains the error from + one of the failed requests. .. 
code-block:: python diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py index c88ee1b4892f..93b71fd86ac4 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py @@ -805,25 +805,26 @@ def batch_optimize_tours( This method is a Long Running Operation (LRO). The inputs for optimization (``OptimizeToursRequest`` messages) and outputs - (``OptimizeToursResponse`` messages) are read/written from/to - Cloud Storage in user-specified format. Like the + (``OptimizeToursResponse`` messages) are read from and written + to Cloud Storage in user-specified format. Like the ``OptimizeTours`` method, each ``OptimizeToursRequest`` contains a ``ShipmentModel`` and returns an ``OptimizeToursResponse`` - containing ``ShipmentRoute``\ s, which are a set of routes to be - performed by vehicles minimizing the overall cost. + containing ``ShipmentRoute`` fields, which are a set of routes + to be performed by vehicles minimizing the overall cost. The user can poll ``operations.get`` to check the status of the LRO: - If the LRO's ``done`` field is false, then at least one request - is still being processed. Other requests may have completed - successfully and their results are available in GCS. + If the LRO ``done`` field is false, then at least one request is + still being processed. Other requests may have completed + successfully and their results are available in Cloud Storage. If the LRO's ``done`` field is true, then all requests have been processed. Any successfully processed requests will have their - results available in GCS. Any requests that failed will not have - their results available in GCS. 
If the LRO's ``error`` field is - set, then it contains the error from one of the failed requests. + results available in Cloud Storage. Any requests that failed + will not have their results available in Cloud Storage. If the + LRO's ``error`` field is set, then it contains the error from + one of the failed requests. .. code-block:: python diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc.py index 8c5621a5f0ba..79f73aece6fa 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc.py @@ -329,25 +329,26 @@ def batch_optimize_tours( This method is a Long Running Operation (LRO). The inputs for optimization (``OptimizeToursRequest`` messages) and outputs - (``OptimizeToursResponse`` messages) are read/written from/to - Cloud Storage in user-specified format. Like the + (``OptimizeToursResponse`` messages) are read from and written + to Cloud Storage in user-specified format. Like the ``OptimizeTours`` method, each ``OptimizeToursRequest`` contains a ``ShipmentModel`` and returns an ``OptimizeToursResponse`` - containing ``ShipmentRoute``\ s, which are a set of routes to be - performed by vehicles minimizing the overall cost. + containing ``ShipmentRoute`` fields, which are a set of routes + to be performed by vehicles minimizing the overall cost. The user can poll ``operations.get`` to check the status of the LRO: - If the LRO's ``done`` field is false, then at least one request - is still being processed. Other requests may have completed - successfully and their results are available in GCS. 
+ If the LRO ``done`` field is false, then at least one request is + still being processed. Other requests may have completed + successfully and their results are available in Cloud Storage. If the LRO's ``done`` field is true, then all requests have been processed. Any successfully processed requests will have their - results available in GCS. Any requests that failed will not have - their results available in GCS. If the LRO's ``error`` field is - set, then it contains the error from one of the failed requests. + results available in Cloud Storage. Any requests that failed + will not have their results available in Cloud Storage. If the + LRO's ``error`` field is set, then it contains the error from + one of the failed requests. Returns: Callable[[~.BatchOptimizeToursRequest], diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc_asyncio.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc_asyncio.py index edf1e396fe6f..285c3188cd2e 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc_asyncio.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc_asyncio.py @@ -336,25 +336,26 @@ def batch_optimize_tours( This method is a Long Running Operation (LRO). The inputs for optimization (``OptimizeToursRequest`` messages) and outputs - (``OptimizeToursResponse`` messages) are read/written from/to - Cloud Storage in user-specified format. Like the + (``OptimizeToursResponse`` messages) are read from and written + to Cloud Storage in user-specified format. 
Like the ``OptimizeTours`` method, each ``OptimizeToursRequest`` contains a ``ShipmentModel`` and returns an ``OptimizeToursResponse`` - containing ``ShipmentRoute``\ s, which are a set of routes to be - performed by vehicles minimizing the overall cost. + containing ``ShipmentRoute`` fields, which are a set of routes + to be performed by vehicles minimizing the overall cost. The user can poll ``operations.get`` to check the status of the LRO: - If the LRO's ``done`` field is false, then at least one request - is still being processed. Other requests may have completed - successfully and their results are available in GCS. + If the LRO ``done`` field is false, then at least one request is + still being processed. Other requests may have completed + successfully and their results are available in Cloud Storage. If the LRO's ``done`` field is true, then all requests have been processed. Any successfully processed requests will have their - results available in GCS. Any requests that failed will not have - their results available in GCS. If the LRO's ``error`` field is - set, then it contains the error from one of the failed requests. + results available in Cloud Storage. Any requests that failed + will not have their results available in Cloud Storage. If the + LRO's ``error`` field is set, then it contains the error from + one of the failed requests. 
Returns: Callable[[~.BatchOptimizeToursRequest], diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/route_optimization_service.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/route_optimization_service.py index 0dd1de2b1f9d..6a4e3811952e 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/route_optimization_service.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/route_optimization_service.py @@ -338,7 +338,8 @@ class OptimizeToursRequest(proto.Message): If true, polylines will be populated in response ``ShipmentRoute``\ s. populate_transition_polylines (bool): - If true, polylines will be populated in response + If true, polylines and route tokens will be populated in + response [ShipmentRoute.transitions][google.maps.routeoptimization.v1.ShipmentRoute.transitions]. allow_large_deadline_despite_interruption_risk (bool): If this is set, then the request can have a @@ -2937,6 +2938,18 @@ class Transition(proto.Message): [populate_transition_polylines] [google.maps.routeoptimization.v1.OptimizeToursRequest.populate_transition_polylines] is set to true. + route_token (str): + Output only. An opaque token that can be passed to + `Navigation + SDK `__ + to reconstruct the route during navigation, and, in the + event of rerouting, honor the original intention when the + route was created. Treat this token as an opaque blob. Don't + compare its value across requests as its value may change + even if the service returns the exact same route. This field + is only populated if [populate_transition_polylines] + [google.maps.routeoptimization.v1.OptimizeToursRequest.populate_transition_polylines] + is set to true. 
vehicle_loads (MutableMapping[str, google.maps.routeoptimization_v1.types.ShipmentRoute.VehicleLoad]): Vehicle loads during this transition, for each type that either appears in this vehicle's @@ -2995,6 +3008,10 @@ class Transition(proto.Message): number=9, message="ShipmentRoute.EncodedPolyline", ) + route_token: str = proto.Field( + proto.STRING, + number=12, + ) vehicle_loads: MutableMapping[ str, "ShipmentRoute.VehicleLoad" ] = proto.MapField( @@ -3562,7 +3579,8 @@ class OptimizeToursValidationError(proto.Message): A validation error is defined by the pair (``code``, ``display_name``) which are always present. - Other fields (below) provide more context about the error. + The fields following this section provide more context about + the error. *MULTIPLE ERRORS*: When there are multiple errors, the validation process tries to output several of them. Much @@ -3570,358 +3588,14 @@ class OptimizeToursValidationError(proto.Message): validation errors will be "fatal", meaning that they stop the entire validation process. This is the case for ``display_name="UNSPECIFIED"`` errors, among others. Some - may cause the validation process to skip other errors. + errors may cause the validation process to skip other + errors. *STABILITY*: ``code`` and ``display_name`` should be very stable. But new codes and display names may appear over time, which may cause a given (invalid) request to yield a different (``code``, ``display_name``) pair because the new - error hid the old one (see "MULTIPLE ERRORS"). - - *REFERENCE*: A list of all (code, name) pairs: - - - UNSPECIFIED = 0; - - - VALIDATION_TIMEOUT_ERROR = 10; Validation couldn't be - completed within the deadline. 
- - - REQUEST_OPTIONS_ERROR = 12; - - - REQUEST_OPTIONS_INVALID_SOLVING_MODE = 1201; - - REQUEST_OPTIONS_INVALID_MAX_VALIDATION_ERRORS = 1203; - - REQUEST_OPTIONS_INVALID_GEODESIC_METERS_PER_SECOND = - 1204; - - REQUEST_OPTIONS_GEODESIC_METERS_PER_SECOND_TOO_SMALL = - 1205; - - REQUEST_OPTIONS_MISSING_GEODESIC_METERS_PER_SECOND = - 1206; - - REQUEST_OPTIONS_POPULATE_PATHFINDER_TRIPS_AND_GEODESIC_DISTANCE - = 1207; - - REQUEST_OPTIONS_COST_MODEL_OPTIONS_AND_GEODESIC_DISTANCE - = 1208; - - REQUEST_OPTIONS_TRAVEL_MODE_INCOMPATIBLE_WITH_TRAFFIC - = 1211; - - REQUEST_OPTIONS_MULTIPLE_TRAFFIC_FLAVORS = 1212; - - REQUEST_OPTIONS_INVALID_TRAFFIC_FLAVOR = 1213; - - REQUEST_OPTIONS_TRAFFIC_ENABLED_WITHOUT_GLOBAL_START_TIME - = 1214; - - REQUEST_OPTIONS_TRAFFIC_ENABLED_WITH_PRECEDENCES = - 1215; - - REQUEST_OPTIONS_TRAFFIC_PREFILL_MODE_INVALID = 1216; - - REQUEST_OPTIONS_TRAFFIC_PREFILL_ENABLED_WITHOUT_TRAFFIC - = 1217; - - - INJECTED_SOLUTION_ERROR = 20; - - - INJECTED_SOLUTION_MISSING_LABEL = 2000; - - INJECTED_SOLUTION_DUPLICATE_LABEL = 2001; - - INJECTED_SOLUTION_AMBIGUOUS_INDEX = 2002; - - INJECTED_SOLUTION_INFEASIBLE_AFTER_GETTING_TRAVEL_TIMES - = 2003; - - INJECTED_SOLUTION_TRANSITION_INCONSISTENT_WITH_ACTUAL_TRAVEL - = 2004; - - INJECTED_SOLUTION_CONCURRENT_SOLUTION_TYPES = 2005; - - INJECTED_SOLUTION_MORE_THAN_ONE_PER_TYPE = 2006; - - INJECTED_SOLUTION_REFRESH_WITHOUT_POPULATE = 2008; - - INJECTED_SOLUTION_CONSTRAINED_ROUTE_PORTION_INFEASIBLE - = 2010; - - - SHIPMENT_MODEL_ERROR = 22; - - - SHIPMENT_MODEL_TOO_LARGE = 2200; - - SHIPMENT_MODEL_TOO_MANY_CAPACITY_TYPES = 2201; - - SHIPMENT_MODEL_GLOBAL_START_TIME_NEGATIVE_OR_NAN = - 2202; - - SHIPMENT_MODEL_GLOBAL_END_TIME_TOO_LARGE_OR_NAN = - 2203; - - SHIPMENT_MODEL_GLOBAL_START_TIME_AFTER_GLOBAL_END_TIME - = 2204; - - SHIPMENT_MODEL_GLOBAL_DURATION_TOO_LONG = 2205; - - SHIPMENT_MODEL_MAX_ACTIVE_VEHICLES_NOT_POSITIVE = - 2206; - - SHIPMENT_MODEL_DURATION_MATRIX_TOO_LARGE = 2207; - - - INDEX_ERROR = 24; - - - TAG_ERROR 
= 26; - - - TIME_WINDOW_ERROR = 28; - - - TIME_WINDOW_INVALID_START_TIME = 2800; - - TIME_WINDOW_INVALID_END_TIME = 2801; - - TIME_WINDOW_INVALID_SOFT_START_TIME = 2802; - - TIME_WINDOW_INVALID_SOFT_END_TIME = 2803; - - TIME_WINDOW_OUTSIDE_GLOBAL_TIME_WINDOW = 2804; - - TIME_WINDOW_START_TIME_AFTER_END_TIME = 2805; - - TIME_WINDOW_INVALID_COST_PER_HOUR_BEFORE_SOFT_START_TIME - = 2806; - - TIME_WINDOW_INVALID_COST_PER_HOUR_AFTER_SOFT_END_TIME - = 2807; - - TIME_WINDOW_COST_BEFORE_SOFT_START_TIME_WITHOUT_SOFT_START_TIME - = 2808; - - TIME_WINDOW_COST_AFTER_SOFT_END_TIME_WITHOUT_SOFT_END_TIME - = 2809; - - TIME_WINDOW_SOFT_START_TIME_WITHOUT_COST_BEFORE_SOFT_START_TIME - = 2810; - - TIME_WINDOW_SOFT_END_TIME_WITHOUT_COST_AFTER_SOFT_END_TIME - = 2811; - - TIME_WINDOW_OVERLAPPING_ADJACENT_OR_EARLIER_THAN_PREVIOUS - = 2812; - - TIME_WINDOW_START_TIME_AFTER_SOFT_START_TIME = 2813; - - TIME_WINDOW_SOFT_START_TIME_OUTSIDE_GLOBAL_TIME_WINDOW - = 2819; - - TIME_WINDOW_SOFT_END_TIME_OUTSIDE_GLOBAL_TIME_WINDOW = - 2820; - - TIME_WINDOW_SOFT_END_TIME_AFTER_END_TIME = 2816; - - TIME_WINDOW_COST_BEFORE_SOFT_START_TIME_SET_AND_MULTIPLE_WINDOWS - = 2817; - - TIME_WINDOW_COST_AFTER_SOFT_END_TIME_SET_AND_MULTIPLE_WINDOWS - = 2818; - - TRANSITION_ATTRIBUTES_ERROR = 30; - - TRANSITION_ATTRIBUTES_INVALID_COST = 3000; - - TRANSITION_ATTRIBUTES_INVALID_COST_PER_KILOMETER = - 3001; - - TRANSITION_ATTRIBUTES_DUPLICATE_TAG_PAIR = 3002; - - TRANSITION_ATTRIBUTES_DISTANCE_LIMIT_MAX_METERS_UNSUPPORTED - = 3003; - - TRANSITION_ATTRIBUTES_UNSPECIFIED_SOURCE_TAGS = 3004; - - TRANSITION_ATTRIBUTES_CONFLICTING_SOURCE_TAGS_FIELDS = - 3005; - - TRANSITION_ATTRIBUTES_UNSPECIFIED_DESTINATION_TAGS = - 3006; - - TRANSITION_ATTRIBUTES_CONFLICTING_DESTINATION_TAGS_FIELDS - = 3007; - - TRANSITION_ATTRIBUTES_DELAY_DURATION_NEGATIVE_OR_NAN = - 3008; - - TRANSITION_ATTRIBUTES_DELAY_DURATION_EXCEEDS_GLOBAL_DURATION - = 3009; - - - AMOUNT_ERROR = 31; - - - AMOUNT_NEGATIVE_VALUE = 3100; - - - LOAD_LIMIT_ERROR = 33; 
- - - LOAD_LIMIT_INVALID_COST_ABOVE_SOFT_MAX = 3303; - - LOAD_LIMIT_SOFT_MAX_WITHOUT_COST_ABOVE_SOFT_MAX = - 3304; - - LOAD_LIMIT_COST_ABOVE_SOFT_MAX_WITHOUT_SOFT_MAX = - 3305; - - LOAD_LIMIT_NEGATIVE_SOFT_MAX = 3306; - - LOAD_LIMIT_MIXED_DEMAND_TYPE = 3307; - - LOAD_LIMIT_MAX_LOAD_NEGATIVE_VALUE = 3308; - - LOAD_LIMIT_SOFT_MAX_ABOVE_MAX = 3309; - - - INTERVAL_ERROR = 34; - - - INTERVAL_MIN_EXCEEDS_MAX = 3401; - - INTERVAL_NEGATIVE_MIN = 3402; - - INTERVAL_NEGATIVE_MAX = 3403; - - INTERVAL_MIN_EXCEEDS_CAPACITY = 3404; - - INTERVAL_MAX_EXCEEDS_CAPACITY = 3405; - - - DISTANCE_LIMIT_ERROR = 36; - - - DISTANCE_LIMIT_INVALID_COST_AFTER_SOFT_MAX = 3601; - - DISTANCE_LIMIT_SOFT_MAX_WITHOUT_COST_AFTER_SOFT_MAX = - 3602; - - DISTANCE_LIMIT_COST_AFTER_SOFT_MAX_WITHOUT_SOFT_MAX = - 3603; - - DISTANCE_LIMIT_NEGATIVE_MAX = 3604; - - DISTANCE_LIMIT_NEGATIVE_SOFT_MAX = 3605; - - DISTANCE_LIMIT_SOFT_MAX_LARGER_THAN_MAX = 3606; - - - DURATION_LIMIT_ERROR = 38; - - - DURATION_LIMIT_MAX_DURATION_NEGATIVE_OR_NAN = 3800; - - DURATION_LIMIT_SOFT_MAX_DURATION_NEGATIVE_OR_NAN = - 3801; - - DURATION_LIMIT_INVALID_COST_PER_HOUR_AFTER_SOFT_MAX = - 3802; - - DURATION_LIMIT_SOFT_MAX_WITHOUT_COST_AFTER_SOFT_MAX = - 3803; - - DURATION_LIMIT_COST_AFTER_SOFT_MAX_WITHOUT_SOFT_MAX = - 3804; - - DURATION_LIMIT_QUADRATIC_SOFT_MAX_DURATION_NEGATIVE_OR_NAN - = 3805; - - DURATION_LIMIT_INVALID_COST_AFTER_QUADRATIC_SOFT_MAX = - 3806; - - DURATION_LIMIT_QUADRATIC_SOFT_MAX_WITHOUT_COST_PER_SQUARE_HOUR - = 3807; - - DURATION_LIMIT_COST_PER_SQUARE_HOUR_WITHOUT_QUADRATIC_SOFT_MAX - = 3808; - - DURATION_LIMIT_QUADRATIC_SOFT_MAX_WITHOUT_MAX = 3809; - - DURATION_LIMIT_SOFT_MAX_LARGER_THAN_MAX = 3810; - - DURATION_LIMIT_QUADRATIC_SOFT_MAX_LARGER_THAN_MAX = - 3811; - - DURATION_LIMIT_DIFF_BETWEEN_MAX_AND_QUADRATIC_SOFT_MAX_TOO_LARGE - = 3812; - - DURATION_LIMIT_MAX_DURATION_EXCEEDS_GLOBAL_DURATION = - 3813; - - DURATION_LIMIT_SOFT_MAX_DURATION_EXCEEDS_GLOBAL_DURATION - = 3814; - - 
DURATION_LIMIT_QUADRATIC_SOFT_MAX_DURATION_EXCEEDS_GLOBAL_DURATION - = 3815; - - - SHIPMENT_ERROR = 40; - - - SHIPMENT_PD_LIMIT_WITHOUT_PICKUP_AND_DELIVERY = 4014; - - SHIPMENT_PD_ABSOLUTE_DETOUR_LIMIT_DURATION_NEGATIVE_OR_NAN - = 4000; - - SHIPMENT_PD_ABSOLUTE_DETOUR_LIMIT_DURATION_EXCEEDS_GLOBAL_DURATION - = 4001; - - SHIPMENT_PD_RELATIVE_DETOUR_LIMIT_INVALID = 4015; - - SHIPMENT_PD_DETOUR_LIMIT_AND_EXTRA_VISIT_DURATION = - 4016; - - SHIPMENT_PD_TIME_LIMIT_DURATION_NEGATIVE_OR_NAN = - 4002; - - SHIPMENT_PD_TIME_LIMIT_DURATION_EXCEEDS_GLOBAL_DURATION - = 4003; - - SHIPMENT_EMPTY_SHIPMENT_TYPE = 4004; - - SHIPMENT_NO_PICKUP_NO_DELIVERY = 4005; - - SHIPMENT_INVALID_PENALTY_COST = 4006; - - SHIPMENT_ALLOWED_VEHICLE_INDEX_OUT_OF_BOUNDS = 4007; - - SHIPMENT_DUPLICATE_ALLOWED_VEHICLE_INDEX = 4008; - - SHIPMENT_INCONSISTENT_COST_FOR_VEHICLE_SIZE_WITHOUT_INDEX - = 4009; - - SHIPMENT_INCONSISTENT_COST_FOR_VEHICLE_SIZE_WITH_INDEX - = 4010; - - SHIPMENT_INVALID_COST_FOR_VEHICLE = 4011; - - SHIPMENT_COST_FOR_VEHICLE_INDEX_OUT_OF_BOUNDS = 4012; - - SHIPMENT_DUPLICATE_COST_FOR_VEHICLE_INDEX = 4013; - - - VEHICLE_ERROR = 42; - - - VEHICLE_EMPTY_REQUIRED_OPERATOR_TYPE = 4200; - - VEHICLE_DUPLICATE_REQUIRED_OPERATOR_TYPE = 4201; - - VEHICLE_NO_OPERATOR_WITH_REQUIRED_OPERATOR_TYPE = - 4202; - - VEHICLE_EMPTY_START_TAG = 4203; - - VEHICLE_DUPLICATE_START_TAG = 4204; - - VEHICLE_EMPTY_END_TAG = 4205; - - VEHICLE_DUPLICATE_END_TAG = 4206; - - VEHICLE_EXTRA_VISIT_DURATION_NEGATIVE_OR_NAN = 4207; - - VEHICLE_EXTRA_VISIT_DURATION_EXCEEDS_GLOBAL_DURATION = - 4208; - - VEHICLE_EXTRA_VISIT_DURATION_EMPTY_KEY = 4209; - - VEHICLE_FIRST_SHIPMENT_INDEX_OUT_OF_BOUNDS = 4210; - - VEHICLE_FIRST_SHIPMENT_IGNORED = 4211; - - VEHICLE_FIRST_SHIPMENT_NOT_BOUND = 4212; - - VEHICLE_LAST_SHIPMENT_INDEX_OUT_OF_BOUNDS = 4213; - - VEHICLE_LAST_SHIPMENT_IGNORED = 4214; - - VEHICLE_LAST_SHIPMENT_NOT_BOUND = 4215; - - VEHICLE_IGNORED_WITH_USED_IF_ROUTE_IS_EMPTY = 4216; - - VEHICLE_INVALID_COST_PER_KILOMETER = 
4217; - - VEHICLE_INVALID_COST_PER_HOUR = 4218; - - VEHICLE_INVALID_COST_PER_TRAVELED_HOUR = 4219; - - VEHICLE_INVALID_FIXED_COST = 4220; - - VEHICLE_INVALID_TRAVEL_DURATION_MULTIPLE = 4221; - - VEHICLE_TRAVEL_DURATION_MULTIPLE_WITH_SHIPMENT_PD_DETOUR_LIMITS - = 4223; - - VEHICLE_MATRIX_INDEX_WITH_SHIPMENT_PD_DETOUR_LIMITS = - 4224; - - VEHICLE_MINIMUM_DURATION_LONGER_THAN_DURATION_LIMIT = - 4222; - - - VISIT_REQUEST_ERROR = 44; - - - VISIT_REQUEST_EMPTY_TAG = 4400; - - VISIT_REQUEST_DUPLICATE_TAG = 4401; - - VISIT_REQUEST_DURATION_NEGATIVE_OR_NAN = 4404; - - VISIT_REQUEST_DURATION_EXCEEDS_GLOBAL_DURATION = 4405; - - - PRECEDENCE_ERROR = 46; - - - PRECEDENCE_RULE_MISSING_FIRST_INDEX = 4600; - - PRECEDENCE_RULE_MISSING_SECOND_INDEX = 4601; - - PRECEDENCE_RULE_FIRST_INDEX_OUT_OF_BOUNDS = 4602; - - PRECEDENCE_RULE_SECOND_INDEX_OUT_OF_BOUNDS = 4603; - - PRECEDENCE_RULE_DUPLICATE_INDEX = 4604; - - PRECEDENCE_RULE_INEXISTENT_FIRST_VISIT_REQUEST = 4605; - - PRECEDENCE_RULE_INEXISTENT_SECOND_VISIT_REQUEST = - 4606; - - - BREAK_ERROR = 48; - - - BREAK_RULE_EMPTY = 4800; - - BREAK_REQUEST_UNSPECIFIED_DURATION = 4801; - - BREAK_REQUEST_UNSPECIFIED_EARLIEST_START_TIME = 4802; - - BREAK_REQUEST_UNSPECIFIED_LATEST_START_TIME = 4803; - - BREAK_REQUEST_DURATION_NEGATIVE_OR_NAN = 4804; = 4804; - - BREAK_REQUEST_LATEST_START_TIME_BEFORE_EARLIEST_START_TIME - = 4805; - - BREAK_REQUEST_EARLIEST_START_TIME_BEFORE_GLOBAL_START_TIME - = 4806; - - BREAK_REQUEST_LATEST_END_TIME_AFTER_GLOBAL_END_TIME = - 4807; - - BREAK_REQUEST_NON_SCHEDULABLE = 4808; - - BREAK_FREQUENCY_MAX_INTER_BREAK_DURATION_NEGATIVE_OR_NAN - = 4809; - - BREAK_FREQUENCY_MIN_BREAK_DURATION_NEGATIVE_OR_NAN = - 4810; - - BREAK_FREQUENCY_MIN_BREAK_DURATION_EXCEEDS_GLOBAL_DURATION - = 4811; - - BREAK_FREQUENCY_MAX_INTER_BREAK_DURATION_EXCEEDS_GLOBAL_DURATION - = 4812; - - BREAK_REQUEST_DURATION_EXCEEDS_GLOBAL_DURATION = 4813; - - BREAK_FREQUENCY_MISSING_MAX_INTER_BREAK_DURATION = - 4814; - - 
BREAK_FREQUENCY_MISSING_MIN_BREAK_DURATION = 4815; - - - SHIPMENT_TYPE_INCOMPATIBILITY_ERROR = 50; - - - SHIPMENT_TYPE_INCOMPATIBILITY_EMPTY_TYPE = 5001; - - SHIPMENT_TYPE_INCOMPATIBILITY_LESS_THAN_TWO_TYPES = - 5002; - - SHIPMENT_TYPE_INCOMPATIBILITY_DUPLICATE_TYPE = 5003; - - SHIPMENT_TYPE_INCOMPATIBILITY_INVALID_INCOMPATIBILITY_MODE - = 5004; - - SHIPMENT_TYPE_INCOMPATIBILITY_TOO_MANY_INCOMPATIBILITIES - = 5005; - - - SHIPMENT_TYPE_REQUIREMENT_ERROR = 52; - - - SHIPMENT_TYPE_REQUIREMENT_NO_REQUIRED_TYPE = 52001; - - SHIPMENT_TYPE_REQUIREMENT_NO_DEPENDENT_TYPE = 52002; - - SHIPMENT_TYPE_REQUIREMENT_INVALID_REQUIREMENT_MODE = - 52003; - - SHIPMENT_TYPE_REQUIREMENT_TOO_MANY_REQUIREMENTS = - 52004; - - SHIPMENT_TYPE_REQUIREMENT_EMPTY_REQUIRED_TYPE = 52005; - - SHIPMENT_TYPE_REQUIREMENT_DUPLICATE_REQUIRED_TYPE = - 52006; - - SHIPMENT_TYPE_REQUIREMENT_NO_REQUIRED_TYPE_FOUND = - 52007; - - SHIPMENT_TYPE_REQUIREMENT_EMPTY_DEPENDENT_TYPE = - 52008; - - SHIPMENT_TYPE_REQUIREMENT_DUPLICATE_DEPENDENT_TYPE = - 52009; - - SHIPMENT_TYPE_REQUIREMENT_SELF_DEPENDENT_TYPE = 52010; - - SHIPMENT_TYPE_REQUIREMENT_GRAPH_HAS_CYCLES = 52011; - - - VEHICLE_OPERATOR_ERROR = 54; - - - VEHICLE_OPERATOR_EMPTY_TYPE = 5400; - - VEHICLE_OPERATOR_MULTIPLE_START_TIME_WINDOWS = 5401; - - VEHICLE_OPERATOR_SOFT_START_TIME_WINDOW = 5402; - - VEHICLE_OPERATOR_MULTIPLE_END_TIME_WINDOWS = 5403; - - VEHICLE_OPERATOR_SOFT_END_TIME_WINDOW = 5404; - - - DURATION_SECONDS_MATRIX_ERROR = 56; - - - DURATION_SECONDS_MATRIX_DURATION_NEGATIVE_OR_NAN = - 5600; - - DURATION_SECONDS_MATRIX_DURATION_EXCEEDS_GLOBAL_DURATION - = 5601; - - - WARNING = 9; - - - WARNING_INJECTED_FIRST_SOLUTION = 90; - - - WARNING_INJECTED_FIRST_SOLUTION_INFEASIBLE_SHIPMENTS_REMOVED - = 9000; - - WARNING_INJECTED_FIRST_SOLUTION_INFEASIBLE_AFTER_GETTING_TRAVEL_TIMES - = 9001; + error hid the old one. For example, see "MULTIPLE ERRORS". display_name (str): The error display name. 
fields (MutableSequence[google.maps.routeoptimization_v1.types.OptimizeToursValidationError.FieldReference]): diff --git a/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json b/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json index ff99ce099d17..c329d83ca2a2 100644 --- a/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json +++ b/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-routeoptimization", - "version": "0.1.3" + "version": "0.1.0" }, "snippets": [ { From dbaefebce6ef5eac6e260fa995dfd765d8c9fc99 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 30 Sep 2024 17:55:56 -0400 Subject: [PATCH 39/59] chore: release main (#13115) :robot: I have created a release *beep* *boop* ---
google-cloud-dataproc: 5.13.0 ## [5.13.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.12.0...google-cloud-dataproc-v5.13.0) (2024-09-30) ### Features * add support for Spark Connect sessions in Dataproc Serverless for Spark ([0d35003](https://github.com/googleapis/google-cloud-python/commit/0d350038411bbdcf10eb7fb6820084abcb362c5a)) ### Documentation * update docs for `filter` field in `ListSessionsRequest` ([0d35003](https://github.com/googleapis/google-cloud-python/commit/0d350038411bbdcf10eb7fb6820084abcb362c5a))
google-cloud-oracledatabase: 0.1.0 ## 0.1.0 (2024-09-30) ### Features * add initial files for google.cloud.oracledatabase.v1 ([#13100](https://github.com/googleapis/google-cloud-python/issues/13100)) ([c638f1f](https://github.com/googleapis/google-cloud-python/commit/c638f1f55a85a228ec6385095ca1befb54067188))
google-maps-routeoptimization: 0.1.4 ## [0.1.4](https://github.com/googleapis/google-cloud-python/compare/google-maps-routeoptimization-v0.1.3...google-maps-routeoptimization-v0.1.4) (2024-09-30) ### Features * A new field `route_token` is added to message `.google.maps.routeoptimization.v1.ShipmentRoute.Transition` ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29)) * Add support for generating route tokens ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29)) ### Documentation * A comment for field `code` in message `.google.maps.routeoptimization.v1.OptimizeToursValidationError` is changed ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29)) * A comment for field `populate_transition_polylines` in message `.google.maps.routeoptimization.v1.OptimizeToursRequest` is changed ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29)) * A comment for method `BatchOptimizeTours` in service `RouteOptimization` is changed ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29))
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 6 +++--- packages/google-cloud-dataproc/CHANGELOG.md | 12 ++++++++++++ .../google/cloud/dataproc/gapic_version.py | 2 +- .../google/cloud/dataproc_v1/gapic_version.py | 2 +- ...snippet_metadata_google.cloud.dataproc.v1.json | 2 +- packages/google-cloud-oracledatabase/CHANGELOG.md | 11 ++++++++++- .../google/cloud/oracledatabase/gapic_version.py | 2 +- .../cloud/oracledatabase_v1/gapic_version.py | 2 +- .../google-maps-routeoptimization/CHANGELOG.md | 15 +++++++++++++++ .../maps/routeoptimization/gapic_version.py | 2 +- .../maps/routeoptimization_v1/gapic_version.py | 2 +- ...metadata_google.maps.routeoptimization.v1.json | 2 +- 12 files changed, 48 insertions(+), 12 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 37cf389b81dc..2fcaaf3233f5 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -68,7 +68,7 @@ "packages/google-cloud-dataform": "0.5.11", "packages/google-cloud-datalabeling": "1.10.5", "packages/google-cloud-dataplex": "2.2.2", - "packages/google-cloud-dataproc": "5.12.0", + "packages/google-cloud-dataproc": "5.13.0", "packages/google-cloud-dataproc-metastore": "1.15.5", "packages/google-cloud-datastream": "1.9.5", "packages/google-cloud-deploy": "2.0.1", @@ -117,7 +117,7 @@ "packages/google-cloud-network-services": "0.5.14", "packages/google-cloud-notebooks": "1.10.5", "packages/google-cloud-optimization": "1.8.5", - "packages/google-cloud-oracledatabase": "0.0.0", + "packages/google-cloud-oracledatabase": "0.1.0", "packages/google-cloud-orchestration-airflow": "1.14.0", "packages/google-cloud-os-config": "1.17.5", "packages/google-cloud-os-login": "2.14.6", @@ -184,7 +184,7 @@ 
"packages/google-maps-fleetengine-delivery": "0.2.4", "packages/google-maps-mapsplatformdatasets": "0.4.2", "packages/google-maps-places": "0.1.18", - "packages/google-maps-routeoptimization": "0.1.3", + "packages/google-maps-routeoptimization": "0.1.4", "packages/google-maps-routing": "0.6.10", "packages/google-maps-solar": "0.1.2", "packages/google-shopping-css": "0.1.8", diff --git a/packages/google-cloud-dataproc/CHANGELOG.md b/packages/google-cloud-dataproc/CHANGELOG.md index 7a8b08f948bd..2601f62c33b4 100644 --- a/packages/google-cloud-dataproc/CHANGELOG.md +++ b/packages/google-cloud-dataproc/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-dataproc/#history +## [5.13.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.12.0...google-cloud-dataproc-v5.13.0) (2024-09-30) + + +### Features + +* add support for Spark Connect sessions in Dataproc Serverless for Spark ([0d35003](https://github.com/googleapis/google-cloud-python/commit/0d350038411bbdcf10eb7fb6820084abcb362c5a)) + + +### Documentation + +* update docs for `filter` field in `ListSessionsRequest` ([0d35003](https://github.com/googleapis/google-cloud-python/commit/0d350038411bbdcf10eb7fb6820084abcb362c5a)) + ## [5.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.11.0...google-cloud-dataproc-v5.12.0) (2024-09-16) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py index 558c8aab67c5..3f1bf5ee8722 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "5.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py index 558c8aab67c5..3f1bf5ee8722 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "5.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json index c5f4e003db04..f516b09c86b6 100644 --- a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json +++ b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc", - "version": "0.1.0" + "version": "5.13.0" }, "snippets": [ { diff --git a/packages/google-cloud-oracledatabase/CHANGELOG.md b/packages/google-cloud-oracledatabase/CHANGELOG.md index 5ddad421e08f..6a8115a2ad70 100644 --- a/packages/google-cloud-oracledatabase/CHANGELOG.md +++ b/packages/google-cloud-oracledatabase/CHANGELOG.md @@ -1 +1,10 @@ -# Changelog \ No newline at end of file +# Changelog + +## 0.1.0 (2024-09-30) + + +### Features + +* add initial files for google.cloud.oracledatabase.v1 ([#13100](https://github.com/googleapis/google-cloud-python/issues/13100)) ([c638f1f](https://github.com/googleapis/google-cloud-python/commit/c638f1f55a85a228ec6385095ca1befb54067188)) + +## Changelog diff --git 
a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py index 558c8aab67c5..33d37a7b677b 100644 --- a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py index 558c8aab67c5..33d37a7b677b 100644 --- a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-maps-routeoptimization/CHANGELOG.md b/packages/google-maps-routeoptimization/CHANGELOG.md index 14bb0c6b2dc5..d53ed3a2c9c2 100644 --- a/packages/google-maps-routeoptimization/CHANGELOG.md +++ b/packages/google-maps-routeoptimization/CHANGELOG.md @@ -1,5 +1,20 @@ # Changelog +## [0.1.4](https://github.com/googleapis/google-cloud-python/compare/google-maps-routeoptimization-v0.1.3...google-maps-routeoptimization-v0.1.4) (2024-09-30) + + +### Features + +* A new field `route_token` is added to message `.google.maps.routeoptimization.v1.ShipmentRoute.Transition` ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29)) +* Add support for generating route tokens ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29)) + + +### Documentation + +* A comment for field `code` in message `.google.maps.routeoptimization.v1.OptimizeToursValidationError` is changed ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29)) +* A comment for field `populate_transition_polylines` in message `.google.maps.routeoptimization.v1.OptimizeToursRequest` is changed ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29)) +* A comment for method `BatchOptimizeTours` in service `RouteOptimization` is changed ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29)) + ## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-maps-routeoptimization-v0.1.2...google-maps-routeoptimization-v0.1.3) (2024-09-16) diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py 
b/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py index 558c8aab67c5..937ede8823ef 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py index 558c8aab67c5..937ede8823ef 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json b/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json index c329d83ca2a2..b41aa32bfdd8 100644 --- a/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json +++ b/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-routeoptimization", - "version": "0.1.0" + "version": "0.1.4" }, "snippets": [ { From 6dde3826c7c13ff1fcc840495811f58648e0678e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 3 Oct 2024 17:20:32 -0400 Subject: [PATCH 40/59] docs: [google-cloud-run]fixed formatting of some documentation (#13122) BEGIN_COMMIT_OVERRIDE docs:fixed formatting of some documentation feat:add Builds API feat:add Service Mesh configuration to Services feat:add GPU configuration to Services feat:add INGRESS_TRAFFIC_NONE to Services feat:add ServiceScaling to Services END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
feat:add Builds API feat:add Service Mesh configuration to Services feat:add GPU configuration to Services feat:add INGRESS_TRAFFIC_NONE to Services feat:add ServiceScaling to Services PiperOrigin-RevId: 681696446 Source-Link: https://github.com/googleapis/googleapis/commit/4fe2139be5d934a40005c41b9dc132a143457ad0 Source-Link: https://github.com/googleapis/googleapis-gen/commit/cb5e78c3c478f81a8c813af5887757dc692052f3 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXJ1bi8uT3dsQm90LnlhbWwiLCJoIjoiY2I1ZTc4YzNjNDc4ZjgxYThjODEzYWY1ODg3NzU3ZGM2OTIwNTJmMyJ9 --------- Co-authored-by: Owl Bot --- .../google-cloud-run/docs/run_v2/builds.rst | 6 + .../docs/run_v2/services_.rst | 1 + .../google/cloud/run/__init__.py | 16 + .../google/cloud/run_v2/__init__.py | 11 + .../google/cloud/run_v2/gapic_metadata.json | 34 + .../cloud/run_v2/services/builds/__init__.py | 22 + .../run_v2/services/builds/async_client.py | 586 +++ .../cloud/run_v2/services/builds/client.py | 1021 ++++++ .../services/builds/transports/__init__.py | 36 + .../run_v2/services/builds/transports/base.py | 199 ++ .../run_v2/services/builds/transports/grpc.py | 343 ++ .../builds/transports/grpc_asyncio.py | 353 ++ .../run_v2/services/builds/transports/rest.py | 665 ++++ .../run_v2/services/jobs/async_client.py | 2 + .../cloud/run_v2/services/jobs/client.py | 15 + .../run_v2/services/revisions/async_client.py | 2 + .../cloud/run_v2/services/revisions/client.py | 22 + .../run_v2/services/services/async_client.py | 4 + .../cloud/run_v2/services/services/client.py | 37 + .../google/cloud/run_v2/types/__init__.py | 8 + .../google/cloud/run_v2/types/build.py | 251 ++ .../google/cloud/run_v2/types/revision.py | 14 + .../cloud/run_v2/types/revision_template.py | 21 +- .../google/cloud/run_v2/types/service.py | 38 +- .../cloud/run_v2/types/vendor_settings.py | 85 +- ..._v2_generated_builds_submit_build_async.py | 58 + ...n_v2_generated_builds_submit_build_sync.py | 58 + .../snippet_metadata_google.cloud.run.v2.json | 153 + 
.../scripts/fixup_run_v2_keywords.py | 1 + .../tests/unit/gapic/run_v2/test_builds.py | 3162 +++++++++++++++++ .../tests/unit/gapic/run_v2/test_jobs.py | 64 +- .../tests/unit/gapic/run_v2/test_revisions.py | 96 +- .../tests/unit/gapic/run_v2/test_services.py | 160 +- 33 files changed, 7417 insertions(+), 127 deletions(-) create mode 100644 packages/google-cloud-run/docs/run_v2/builds.rst create mode 100644 packages/google-cloud-run/google/cloud/run_v2/services/builds/__init__.py create mode 100644 packages/google-cloud-run/google/cloud/run_v2/services/builds/async_client.py create mode 100644 packages/google-cloud-run/google/cloud/run_v2/services/builds/client.py create mode 100644 packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/__init__.py create mode 100644 packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/base.py create mode 100644 packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/grpc.py create mode 100644 packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/rest.py create mode 100644 packages/google-cloud-run/google/cloud/run_v2/types/build.py create mode 100644 packages/google-cloud-run/samples/generated_samples/run_v2_generated_builds_submit_build_async.py create mode 100644 packages/google-cloud-run/samples/generated_samples/run_v2_generated_builds_submit_build_sync.py create mode 100644 packages/google-cloud-run/tests/unit/gapic/run_v2/test_builds.py diff --git a/packages/google-cloud-run/docs/run_v2/builds.rst b/packages/google-cloud-run/docs/run_v2/builds.rst new file mode 100644 index 000000000000..fdedc682ab01 --- /dev/null +++ b/packages/google-cloud-run/docs/run_v2/builds.rst @@ -0,0 +1,6 @@ +Builds +------------------------ + +.. 
automodule:: google.cloud.run_v2.services.builds + :members: + :inherited-members: diff --git a/packages/google-cloud-run/docs/run_v2/services_.rst b/packages/google-cloud-run/docs/run_v2/services_.rst index c4c4ad864dfd..4a37414732ec 100644 --- a/packages/google-cloud-run/docs/run_v2/services_.rst +++ b/packages/google-cloud-run/docs/run_v2/services_.rst @@ -3,6 +3,7 @@ Services for Google Cloud Run v2 API .. toctree:: :maxdepth: 2 + builds executions jobs revisions diff --git a/packages/google-cloud-run/google/cloud/run/__init__.py b/packages/google-cloud-run/google/cloud/run/__init__.py index 740233a8afb0..47b51fb6ddca 100644 --- a/packages/google-cloud-run/google/cloud/run/__init__.py +++ b/packages/google-cloud-run/google/cloud/run/__init__.py @@ -18,6 +18,8 @@ __version__ = package_version.__version__ +from google.cloud.run_v2.services.builds.async_client import BuildsAsyncClient +from google.cloud.run_v2.services.builds.client import BuildsClient from google.cloud.run_v2.services.executions.async_client import ExecutionsAsyncClient from google.cloud.run_v2.services.executions.client import ExecutionsClient from google.cloud.run_v2.services.jobs.async_client import JobsAsyncClient @@ -28,6 +30,11 @@ from google.cloud.run_v2.services.services.client import ServicesClient from google.cloud.run_v2.services.tasks.async_client import TasksAsyncClient from google.cloud.run_v2.services.tasks.client import TasksClient +from google.cloud.run_v2.types.build import ( + StorageSource, + SubmitBuildRequest, + SubmitBuildResponse, +) from google.cloud.run_v2.types.condition import Condition from google.cloud.run_v2.types.execution import ( CancelExecutionRequest, @@ -106,12 +113,16 @@ EncryptionKeyRevocationAction, ExecutionEnvironment, IngressTraffic, + NodeSelector, RevisionScaling, + ServiceMesh, ServiceScaling, VpcAccess, ) __all__ = ( + "BuildsClient", + "BuildsAsyncClient", "ExecutionsClient", "ExecutionsAsyncClient", "JobsClient", @@ -122,6 +133,9 @@ 
"ServicesAsyncClient", "TasksClient", "TasksAsyncClient", + "StorageSource", + "SubmitBuildRequest", + "SubmitBuildResponse", "Condition", "CancelExecutionRequest", "DeleteExecutionRequest", @@ -182,7 +196,9 @@ "TrafficTargetStatus", "TrafficTargetAllocationType", "BinaryAuthorization", + "NodeSelector", "RevisionScaling", + "ServiceMesh", "ServiceScaling", "VpcAccess", "EncryptionKeyRevocationAction", diff --git a/packages/google-cloud-run/google/cloud/run_v2/__init__.py b/packages/google-cloud-run/google/cloud/run_v2/__init__.py index 6c8895daadbd..ddc4d0dd73ee 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/__init__.py +++ b/packages/google-cloud-run/google/cloud/run_v2/__init__.py @@ -18,11 +18,13 @@ __version__ = package_version.__version__ +from .services.builds import BuildsAsyncClient, BuildsClient from .services.executions import ExecutionsAsyncClient, ExecutionsClient from .services.jobs import JobsAsyncClient, JobsClient from .services.revisions import RevisionsAsyncClient, RevisionsClient from .services.services import ServicesAsyncClient, ServicesClient from .services.tasks import TasksAsyncClient, TasksClient +from .types.build import StorageSource, SubmitBuildRequest, SubmitBuildResponse from .types.condition import Condition from .types.execution import ( CancelExecutionRequest, @@ -101,18 +103,22 @@ EncryptionKeyRevocationAction, ExecutionEnvironment, IngressTraffic, + NodeSelector, RevisionScaling, + ServiceMesh, ServiceScaling, VpcAccess, ) __all__ = ( + "BuildsAsyncClient", "ExecutionsAsyncClient", "JobsAsyncClient", "RevisionsAsyncClient", "ServicesAsyncClient", "TasksAsyncClient", "BinaryAuthorization", + "BuildsClient", "CancelExecutionRequest", "CloudSqlInstance", "Condition", @@ -156,6 +162,7 @@ "ListTasksRequest", "ListTasksResponse", "NFSVolumeSource", + "NodeSelector", "Probe", "ResourceRequirements", "Revision", @@ -167,8 +174,12 @@ "SecretKeySelector", "SecretVolumeSource", "Service", + "ServiceMesh", "ServiceScaling", 
"ServicesClient", + "StorageSource", + "SubmitBuildRequest", + "SubmitBuildResponse", "TCPSocketAction", "Task", "TaskAttemptResult", diff --git a/packages/google-cloud-run/google/cloud/run_v2/gapic_metadata.json b/packages/google-cloud-run/google/cloud/run_v2/gapic_metadata.json index e4131ae5c8f6..3f193aa3f5a1 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/gapic_metadata.json +++ b/packages/google-cloud-run/google/cloud/run_v2/gapic_metadata.json @@ -5,6 +5,40 @@ "protoPackage": "google.cloud.run.v2", "schema": "1.0", "services": { + "Builds": { + "clients": { + "grpc": { + "libraryClient": "BuildsClient", + "rpcs": { + "SubmitBuild": { + "methods": [ + "submit_build" + ] + } + } + }, + "grpc-async": { + "libraryClient": "BuildsAsyncClient", + "rpcs": { + "SubmitBuild": { + "methods": [ + "submit_build" + ] + } + } + }, + "rest": { + "libraryClient": "BuildsClient", + "rpcs": { + "SubmitBuild": { + "methods": [ + "submit_build" + ] + } + } + } + } + }, "Executions": { "clients": { "grpc": { diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/builds/__init__.py b/packages/google-cloud-run/google/cloud/run_v2/services/builds/__init__.py new file mode 100644 index 000000000000..c8c671c7635b --- /dev/null +++ b/packages/google-cloud-run/google/cloud/run_v2/services/builds/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import BuildsAsyncClient +from .client import BuildsClient + +__all__ = ( + "BuildsClient", + "BuildsAsyncClient", +) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/builds/async_client.py b/packages/google-cloud-run/google/cloud/run_v2/services/builds/async_client.py new file mode 100644 index 000000000000..db45db73e87f --- /dev/null +++ b/packages/google-cloud-run/google/cloud/run_v2/services/builds/async_client.py @@ -0,0 +1,586 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.run_v2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.run_v2.types import build + +from .client import BuildsClient +from .transports.base import DEFAULT_CLIENT_INFO, BuildsTransport +from .transports.grpc_asyncio import BuildsGrpcAsyncIOTransport + + +class BuildsAsyncClient: + """Cloud Run Build Control Plane API""" + + _client: BuildsClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = BuildsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = BuildsClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = BuildsClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = BuildsClient._DEFAULT_UNIVERSE + + worker_pool_path = staticmethod(BuildsClient.worker_pool_path) + parse_worker_pool_path = staticmethod(BuildsClient.parse_worker_pool_path) + common_billing_account_path = staticmethod(BuildsClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod( + BuildsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(BuildsClient.common_folder_path) + parse_common_folder_path = staticmethod(BuildsClient.parse_common_folder_path) + common_organization_path = staticmethod(BuildsClient.common_organization_path) + parse_common_organization_path = staticmethod( + BuildsClient.parse_common_organization_path + ) + common_project_path = staticmethod(BuildsClient.common_project_path) + parse_common_project_path = staticmethod(BuildsClient.parse_common_project_path) + common_location_path = staticmethod(BuildsClient.common_location_path) + parse_common_location_path = staticmethod(BuildsClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BuildsAsyncClient: The constructed client. + """ + return BuildsClient.from_service_account_info.__func__(BuildsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BuildsAsyncClient: The constructed client. + """ + return BuildsClient.from_service_account_file.__func__(BuildsAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return BuildsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> BuildsTransport: + """Returns the transport used by the client instance. + + Returns: + BuildsTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = BuildsClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, BuildsTransport, Callable[..., BuildsTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the builds async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,BuildsTransport,Callable[..., BuildsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BuildsTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = BuildsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def submit_build( + self, + request: Optional[Union[build.SubmitBuildRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> build.SubmitBuildResponse: + r"""Submits a build in a given project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import run_v2 + + async def sample_submit_build(): + # Create a client + client = run_v2.BuildsAsyncClient() + + # Initialize request argument(s) + storage_source = run_v2.StorageSource() + storage_source.bucket = "bucket_value" + storage_source.object_ = "object__value" + + request = run_v2.SubmitBuildRequest( + storage_source=storage_source, + parent="parent_value", + image_uri="image_uri_value", + ) + + # Make the request + response = await client.submit_build(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.run_v2.types.SubmitBuildRequest, dict]]): + The request object. Request message for submitting a + Build. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.run_v2.types.SubmitBuildResponse: + Response message for submitting a + Build. 
+ + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, build.SubmitBuildRequest): + request = build.SubmitBuildRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.submit_build + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def wait_operation( + self, + request: Optional[operations_pb2.WaitOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Waits until the specified long-running operation is done or reaches at most + a specified timeout, returning the latest state. + + If the operation is already done, the latest state is immediately returned. + If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + timeout is used. If the server does not support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.WaitOperationRequest`): + The request object. Request message for + `WaitOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.WaitOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.wait_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "BuildsAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("BuildsAsyncClient",) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/builds/client.py b/packages/google-cloud-run/google/cloud/run_v2/services/builds/client.py new file mode 100644 index 000000000000..e7fdd36add08 --- /dev/null +++ b/packages/google-cloud-run/google/cloud/run_v2/services/builds/client.py @@ -0,0 +1,1021 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.run_v2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.run_v2.types import build + +from .transports.base import DEFAULT_CLIENT_INFO, BuildsTransport +from .transports.grpc import BuildsGrpcTransport +from .transports.grpc_asyncio import BuildsGrpcAsyncIOTransport +from .transports.rest import BuildsRestTransport + + +class BuildsClientMeta(type): + """Metaclass for the Builds client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[BuildsTransport]] + _transport_registry["grpc"] = BuildsGrpcTransport + _transport_registry["grpc_asyncio"] = BuildsGrpcAsyncIOTransport + _transport_registry["rest"] = BuildsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[BuildsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class BuildsClient(metaclass=BuildsClientMeta): + """Cloud Run Build Control Plane API""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = "run.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "run.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BuildsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BuildsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> BuildsTransport: + """Returns the transport used by the client instance. + + Returns: + BuildsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def worker_pool_path( + project: str, + location: str, + worker_pool: str, + ) -> str: + """Returns a fully-qualified worker_pool string.""" + return ( + "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( + project=project, + location=location, + worker_pool=worker_pool, + ) + ) + + @staticmethod + def parse_worker_pool_path(path: str) -> Dict[str, str]: + """Parses a worker_pool path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/workerPools/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + 
project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. 
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. 
+ + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. 
If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = BuildsClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = BuildsClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = BuildsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = BuildsClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = BuildsClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or BuildsClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. 

        Returns:
            str: The API endpoint used by the client instance.
        """
        return self._api_endpoint

    @property
    def universe_domain(self) -> str:
        """Return the universe domain used by the client instance.

        Returns:
            str: The universe domain used by the client instance.
        """
        return self._universe_domain

    def __init__(
        self,
        *,
        credentials: Optional[ga_credentials.Credentials] = None,
        transport: Optional[
            Union[str, BuildsTransport, Callable[..., BuildsTransport]]
        ] = None,
        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiates the builds client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Optional[Union[str,BuildsTransport,Callable[..., BuildsTransport]]]):
                The transport to use, or a Callable that constructs and returns a new transport.
                If a Callable is given, it will be called with the same set of initialization
                arguments as used in the BuildsTransport constructor.
                If set to None, a transport is chosen automatically.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
                Custom options for the client.

                1. The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client when ``transport`` is
                not explicitly provided. Only if this property is not set and
                ``transport`` was not explicitly provided, the endpoint is
                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
                variable, which have one of the following values:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto-switch to the
                default mTLS endpoint if client certificate is present; this is
                the default value).

                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide a client certificate for mTLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.

                3. The ``universe_domain`` property can be used to override the
                default "googleapis.com" universe. Note that the ``api_endpoint``
                property still takes precedence; and ``universe_domain`` is
                currently not supported for mTLS.

            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # Normalize client_options: accept a dict, None, or a ClientOptions.
        self._client_options = client_options
        if isinstance(self._client_options, dict):
            self._client_options = client_options_lib.from_dict(self._client_options)
        if self._client_options is None:
            self._client_options = client_options_lib.ClientOptions()
        self._client_options = cast(
            client_options_lib.ClientOptions, self._client_options
        )

        universe_domain_opt = getattr(self._client_options, "universe_domain", None)

        (
            self._use_client_cert,
            self._use_mtls_endpoint,
            self._universe_domain_env,
        ) = BuildsClient._read_environment_variables()
        self._client_cert_source = BuildsClient._get_client_cert_source(
            self._client_options.client_cert_source, self._use_client_cert
        )
        self._universe_domain = BuildsClient._get_universe_domain(
            universe_domain_opt, self._universe_domain_env
        )
        self._api_endpoint = None  # updated below, depending on `transport`

        # Initialize the universe domain validation.
        self._is_universe_domain_valid = False

        api_key_value = getattr(self._client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError(
                "client_options.api_key and credentials are mutually exclusive"
            )

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        transport_provided = isinstance(transport, BuildsTransport)
        if transport_provided:
            # transport is a BuildsTransport instance.
            if credentials or self._client_options.credentials_file or api_key_value:
                raise ValueError(
                    "When providing a transport instance, "
                    "provide its credentials directly."
                )
            if self._client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = cast(BuildsTransport, transport)
            self._api_endpoint = self._transport.host

        self._api_endpoint = self._api_endpoint or BuildsClient._get_api_endpoint(
            self._client_options.api_endpoint,
            self._client_cert_source,
            self._universe_domain,
            self._use_mtls_endpoint,
        )

        if not transport_provided:
            import google.auth._default  # type: ignore

            if api_key_value and hasattr(
                google.auth._default, "get_api_key_credentials"
            ):
                credentials = google.auth._default.get_api_key_credentials(
                    api_key_value
                )

            transport_init: Union[
                Type[BuildsTransport], Callable[..., BuildsTransport]
            ] = (
                BuildsClient.get_transport_class(transport)
                if isinstance(transport, str) or transport is None
                else cast(Callable[..., BuildsTransport], transport)
            )
            # initialize with the provided callable or the passed in class
            self._transport = transport_init(
                credentials=credentials,
                credentials_file=self._client_options.credentials_file,
                host=self._api_endpoint,
                scopes=self._client_options.scopes,
                client_cert_source_for_mtls=self._client_cert_source,
                quota_project_id=self._client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=self._client_options.api_audience,
            )

    def submit_build(
        self,
        request: Optional[Union[build.SubmitBuildRequest, dict]] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> build.SubmitBuildResponse:
        r"""Submits a build in a given project.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import run_v2 + + def sample_submit_build(): + # Create a client + client = run_v2.BuildsClient() + + # Initialize request argument(s) + storage_source = run_v2.StorageSource() + storage_source.bucket = "bucket_value" + storage_source.object_ = "object__value" + + request = run_v2.SubmitBuildRequest( + storage_source=storage_source, + parent="parent_value", + image_uri="image_uri_value", + ) + + # Make the request + response = client.submit_build(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.run_v2.types.SubmitBuildRequest, dict]): + The request object. Request message for submitting a + Build. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.run_v2.types.SubmitBuildResponse: + Response message for submitting a + Build. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, build.SubmitBuildRequest): + request = build.SubmitBuildRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.submit_build] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "BuildsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def wait_operation( + self, + request: Optional[operations_pb2.WaitOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Waits until the specified long-running operation is done or reaches at most + a specified timeout, returning the latest state. + + If the operation is already done, the latest state is immediately returned. + If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + timeout is used. If the server does not support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.WaitOperationRequest`): + The request object. Request message for + `WaitOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.WaitOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.wait_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response


DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__
)


__all__ = ("BuildsClient",)
diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/__init__.py b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/__init__.py
new file mode 100644
index 000000000000..bf368793a29f
--- /dev/null
+++ b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/__init__.py
@@ -0,0 +1,36 @@
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import BuildsTransport
from .grpc import BuildsGrpcTransport
from .grpc_asyncio import BuildsGrpcAsyncIOTransport
from .rest import BuildsRestInterceptor, BuildsRestTransport

# Compile a registry of transports.
_transport_registry = OrderedDict()  # type: Dict[str, Type[BuildsTransport]]
_transport_registry["grpc"] = BuildsGrpcTransport
_transport_registry["grpc_asyncio"] = BuildsGrpcAsyncIOTransport
_transport_registry["rest"] = BuildsRestTransport

__all__ = (
    "BuildsTransport",
    "BuildsGrpcTransport",
    "BuildsGrpcAsyncIOTransport",
    "BuildsRestTransport",
    "BuildsRestInterceptor",
)
diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/base.py b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/base.py
new file mode 100644
index 000000000000..c30ed50a208d
--- /dev/null
+++ b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/base.py
@@ -0,0 +1,199 @@
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.run_v2 import gapic_version as package_version +from google.cloud.run_v2.types import build + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class BuildsTransport(abc.ABC): + """Abstract transport class for Builds.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "run.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'run.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.submit_build: gapic_v1.method.wrap_method( + self.submit_build, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def submit_build( + self, + ) -> Callable[ + [build.SubmitBuildRequest], + Union[build.SubmitBuildResponse, Awaitable[build.SubmitBuildResponse]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def wait_operation( + self, + ) -> Callable[ + [operations_pb2.WaitOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("BuildsTransport",) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/grpc.py b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/grpc.py new file mode 100644 index 000000000000..3097fbc74d50 --- /dev/null +++ 
b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/grpc.py @@ -0,0 +1,343 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.run_v2.types import build + +from .base import DEFAULT_CLIENT_INFO, BuildsTransport + + +class BuildsGrpcTransport(BuildsTransport): + """gRPC backend transport for Builds. + + Cloud Run Build Control Plane API + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "run.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'run.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "run.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def submit_build( + self, + ) -> Callable[[build.SubmitBuildRequest], build.SubmitBuildResponse]: + r"""Return a callable for the submit build method over gRPC. + + Submits a build in a given project. + + Returns: + Callable[[~.SubmitBuildRequest], + ~.SubmitBuildResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "submit_build" not in self._stubs: + self._stubs["submit_build"] = self.grpc_channel.unary_unary( + "/google.cloud.run.v2.Builds/SubmitBuild", + request_serializer=build.SubmitBuildRequest.serialize, + response_deserializer=build.SubmitBuildResponse.deserialize, + ) + return self._stubs["submit_build"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "delete_operation" not in self._stubs:
+            self._stubs["delete_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/DeleteOperation",
+                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["delete_operation"]
+
+    @property
+    def wait_operation(
+        self,
+    ) -> Callable[[operations_pb2.WaitOperationRequest], None]:
+        r"""Return a callable for the wait_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        # Guard on "wait_operation" (this stub's own cache key): guarding on
+        # another method's key would skip stub creation whenever that other
+        # stub already exists, and the return below would raise KeyError.
+        if "wait_operation" not in self._stubs:
+            self._stubs["wait_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/WaitOperation",
+                request_serializer=operations_pb2.WaitOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["wait_operation"]
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("BuildsGrpcTransport",) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/grpc_asyncio.py b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/grpc_asyncio.py new file mode 100644 index 000000000000..d4277b75fc04 --- /dev/null +++ b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/grpc_asyncio.py @@ -0,0 +1,353 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.run_v2.types import build + +from .base import DEFAULT_CLIENT_INFO, BuildsTransport +from .grpc import BuildsGrpcTransport + + +class BuildsGrpcAsyncIOTransport(BuildsTransport): + """gRPC AsyncIO backend transport for Builds. + + Cloud Run Build Control Plane API + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "run.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "run.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'run.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the 
credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def submit_build( + self, + ) -> Callable[[build.SubmitBuildRequest], Awaitable[build.SubmitBuildResponse]]: + r"""Return a callable for the submit build method over gRPC. + + Submits a build in a given project. + + Returns: + Callable[[~.SubmitBuildRequest], + Awaitable[~.SubmitBuildResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "submit_build" not in self._stubs: + self._stubs["submit_build"] = self.grpc_channel.unary_unary( + "/google.cloud.run.v2.Builds/SubmitBuild", + request_serializer=build.SubmitBuildRequest.serialize, + response_deserializer=build.SubmitBuildResponse.deserialize, + ) + return self._stubs["submit_build"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.submit_build: gapic_v1.method_async.wrap_method( + self.submit_build, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def wait_operation( + self, + ) -> Callable[[operations_pb2.WaitOperationRequest], None]: + r"""Return a callable for the wait_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        # Guard on "wait_operation" (this stub's own cache key), not
+        # "delete_operation": the copy-pasted guard would skip stub creation
+        # once delete_operation's stub exists, making the return below raise
+        # KeyError("wait_operation").
+        if "wait_operation" not in self._stubs:
+            self._stubs["wait_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/WaitOperation",
+                request_serializer=operations_pb2.WaitOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["wait_operation"]
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[
+        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
+    ]:
+        r"""Return a callable for the list_operations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("BuildsGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/rest.py b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/rest.py new file mode 100644 index 000000000000..7e8ec2f6ff01 --- /dev/null +++ b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/rest.py @@ -0,0 +1,665 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.run_v2.types import build + +from .base import BuildsTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class BuildsRestInterceptor: + """Interceptor for Builds. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the BuildsRestTransport. + + .. 
code-block:: python + class MyCustomBuildsInterceptor(BuildsRestInterceptor): + def pre_submit_build(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_submit_build(self, response): + logging.log(f"Received response: {response}") + return response + + transport = BuildsRestTransport(interceptor=MyCustomBuildsInterceptor()) + client = BuildsClient(transport=transport) + + + """ + + def pre_submit_build( + self, request: build.SubmitBuildRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[build.SubmitBuildRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for submit_build + + Override in a subclass to manipulate the request or metadata + before they are sent to the Builds server. + """ + return request, metadata + + def post_submit_build( + self, response: build.SubmitBuildResponse + ) -> build.SubmitBuildResponse: + """Post-rpc interceptor for submit_build + + Override in a subclass to manipulate the response + after it is returned by the Builds server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Builds server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the Builds server but before + it is returned to user code. 
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Builds server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Builds server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Builds server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Builds server but before + it is returned to user code. + """ + return response + + def pre_wait_operation( + self, + request: operations_pb2.WaitOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for wait_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Builds server. 
+ """ + return request, metadata + + def post_wait_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for wait_operation + + Override in a subclass to manipulate the response + after it is returned by the Builds server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class BuildsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: BuildsRestInterceptor + + +class BuildsRestTransport(BuildsTransport): + """REST backend transport for Builds. + + Cloud Run Build Control Plane API + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "run.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[BuildsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'run.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or BuildsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _SubmitBuild(BuildsRestStub): + def __hash__(self): + return hash("SubmitBuild") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: build.SubmitBuildRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> build.SubmitBuildResponse: + r"""Call the submit build method over HTTP. + + Args: + request (~.build.SubmitBuildRequest): + The request object. Request message for submitting a + Build. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.build.SubmitBuildResponse: + Response message for submitting a + Build. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/builds:submit", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_submit_build(request, metadata) + pb_request = build.SubmitBuildRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = build.SubmitBuildResponse() + pb_resp = build.SubmitBuildResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_submit_build(resp) + return resp + + @property + def submit_build( + self, + ) -> Callable[[build.SubmitBuildRequest], build.SubmitBuildResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SubmitBuild(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(BuildsRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(BuildsRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(BuildsRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def wait_operation(self): + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore + + class _WaitOperation(BuildsRestStub): + def __call__( + self, + request: operations_pb2.WaitOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the wait operation method over HTTP. + + Args: + request (operations_pb2.WaitOperationRequest): + The request object for WaitOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from WaitOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/operations/*}:wait", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_wait_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_wait_operation(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("BuildsRestTransport",) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/jobs/async_client.py b/packages/google-cloud-run/google/cloud/run_v2/services/jobs/async_client.py index 931b728a5107..b41ee497e867 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/jobs/async_client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/jobs/async_client.py @@ -82,6 +82,8 @@ class JobsAsyncClient: parse_execution_path = staticmethod(JobsClient.parse_execution_path) job_path = staticmethod(JobsClient.job_path) parse_job_path = staticmethod(JobsClient.parse_job_path) + policy_path = staticmethod(JobsClient.policy_path) + parse_policy_path = staticmethod(JobsClient.parse_policy_path) secret_path = staticmethod(JobsClient.secret_path) parse_secret_path = staticmethod(JobsClient.parse_secret_path) secret_version_path = staticmethod(JobsClient.secret_version_path) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/jobs/client.py b/packages/google-cloud-run/google/cloud/run_v2/services/jobs/client.py index 90ccfba0b0a8..19f51f6caf36 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/jobs/client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/jobs/client.py @@ -285,6 +285,21 @@ def parse_job_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def policy_path( + project: str, + ) -> str: + """Returns a fully-qualified policy string.""" + return "projects/{project}/policy".format( + project=project, + ) + + @staticmethod + def parse_policy_path(path: str) -> Dict[str, str]: + """Parses a 
policy path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/policy$", path) + return m.groupdict() if m else {} + @staticmethod + def secret_path( + project: str, diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/revisions/async_client.py b/packages/google-cloud-run/google/cloud/run_v2/services/revisions/async_client.py index 4862a5b8fd37..37ff9b05024c 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/revisions/async_client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/revisions/async_client.py @@ -80,6 +80,8 @@ class RevisionsAsyncClient: parse_connector_path = staticmethod(RevisionsClient.parse_connector_path) crypto_key_path = staticmethod(RevisionsClient.crypto_key_path) parse_crypto_key_path = staticmethod(RevisionsClient.parse_crypto_key_path) + mesh_path = staticmethod(RevisionsClient.mesh_path) + parse_mesh_path = staticmethod(RevisionsClient.parse_mesh_path) revision_path = staticmethod(RevisionsClient.revision_path) parse_revision_path = staticmethod(RevisionsClient.parse_revision_path) secret_path = staticmethod(RevisionsClient.secret_path) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/revisions/client.py b/packages/google-cloud-run/google/cloud/run_v2/services/revisions/client.py index fe0b0250af38..05953885ca89 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/revisions/client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/revisions/client.py @@ -241,6 +241,28 @@ def parse_crypto_key_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def mesh_path( + project: str, + location: str, + mesh: str, + ) -> str: + """Returns a fully-qualified mesh string.""" + return "projects/{project}/locations/{location}/meshes/{mesh}".format( + project=project, + location=location, + mesh=mesh, + ) + + @staticmethod + def parse_mesh_path(path: str) -> Dict[str, str]: + """Parses a mesh path into its component 
segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/meshes/(?P<mesh>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod + def revision_path( + project: str, diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/services/async_client.py b/packages/google-cloud-run/google/cloud/run_v2/services/services/async_client.py index 57ec39c67773..28d259c68b51 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/services/async_client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/services/async_client.py @@ -79,6 +79,10 @@ class ServicesAsyncClient: parse_connector_path = staticmethod(ServicesClient.parse_connector_path) crypto_key_path = staticmethod(ServicesClient.crypto_key_path) parse_crypto_key_path = staticmethod(ServicesClient.parse_crypto_key_path) + mesh_path = staticmethod(ServicesClient.mesh_path) + parse_mesh_path = staticmethod(ServicesClient.parse_mesh_path) + policy_path = staticmethod(ServicesClient.policy_path) + parse_policy_path = staticmethod(ServicesClient.parse_policy_path) revision_path = staticmethod(ServicesClient.revision_path) parse_revision_path = staticmethod(ServicesClient.parse_revision_path) secret_path = staticmethod(ServicesClient.secret_path) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/services/client.py b/packages/google-cloud-run/google/cloud/run_v2/services/services/client.py index b9909a3d3f08..65ad349bd447 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/services/client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/services/client.py @@ -240,6 +240,43 @@ def parse_crypto_key_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def mesh_path( + project: str, + location: str, + mesh: str, + ) -> str: + """Returns a fully-qualified mesh string.""" + return "projects/{project}/locations/{location}/meshes/{mesh}".format( + project=project, + location=location, + mesh=mesh, + 
) + + @staticmethod + def parse_mesh_path(path: str) -> Dict[str, str]: + """Parses a mesh path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/meshes/(?P<mesh>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def policy_path( + project: str, + ) -> str: + """Returns a fully-qualified policy string.""" + return "projects/{project}/policy".format( + project=project, + ) + + @staticmethod + def parse_policy_path(path: str) -> Dict[str, str]: + """Parses a policy path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/policy$", path) + return m.groupdict() if m else {} + @staticmethod + def revision_path( + project: str, diff --git a/packages/google-cloud-run/google/cloud/run_v2/types/__init__.py b/packages/google-cloud-run/google/cloud/run_v2/types/__init__.py index 5ac1e2b5c025..932186b26e22 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/types/__init__.py +++ b/packages/google-cloud-run/google/cloud/run_v2/types/__init__.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from .build import StorageSource, SubmitBuildRequest, SubmitBuildResponse from .condition import Condition from .execution import ( CancelExecutionRequest, @@ -91,12 +92,17 @@ EncryptionKeyRevocationAction, ExecutionEnvironment, IngressTraffic, + NodeSelector, RevisionScaling, + ServiceMesh, ServiceScaling, VpcAccess, ) __all__ = ( + "StorageSource", + "SubmitBuildRequest", + "SubmitBuildResponse", "Condition", "CancelExecutionRequest", "DeleteExecutionRequest", @@ -157,7 +163,9 @@ "TrafficTargetStatus", "TrafficTargetAllocationType", "BinaryAuthorization", + "NodeSelector", "RevisionScaling", + "ServiceMesh", "ServiceScaling", "VpcAccess", "EncryptionKeyRevocationAction", diff --git a/packages/google-cloud-run/google/cloud/run_v2/types/build.py b/packages/google-cloud-run/google/cloud/run_v2/types/build.py new file mode 100644 index 000000000000..c66b6d2c5ebf --- /dev/null +++ b/packages/google-cloud-run/google/cloud/run_v2/types/build.py @@ -0,0 +1,251 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.longrunning import operations_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.run.v2", + manifest={ + "SubmitBuildRequest", + "SubmitBuildResponse", + "StorageSource", + }, +) + + +class SubmitBuildRequest(proto.Message): + r"""Request message for submitting a Build. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. The project and location to build in. Location + must be a region, e.g., 'us-central1' or 'global' if the + global builder is to be used. Format: + ``projects/{project}/locations/{location}`` + storage_source (google.cloud.run_v2.types.StorageSource): + Required. Source for the build. + + This field is a member of `oneof`_ ``source``. + image_uri (str): + Required. Artifact Registry URI to store the + built image. + buildpack_build (google.cloud.run_v2.types.SubmitBuildRequest.BuildpacksBuild): + Build the source using Buildpacks. + + This field is a member of `oneof`_ ``build_type``. + docker_build (google.cloud.run_v2.types.SubmitBuildRequest.DockerBuild): + Build the source using Docker. This means the + source has a Dockerfile. + + This field is a member of `oneof`_ ``build_type``. + service_account (str): + Optional. The service account to use for the + build. If not set, the default Cloud Build + service account for the project will be used. + worker_pool (str): + Optional. Name of the Cloud Build Custom Worker Pool that + should be used to build the function. 
The format of this + field is + ``projects/{project}/locations/{region}/workerPools/{workerPool}`` + where ``{project}`` and ``{region}`` are the project id and + region respectively where the worker pool is defined and + ``{workerPool}`` is the short name of the worker pool. + tags (MutableSequence[str]): + Optional. Additional tags to annotate the + build. + """ + + class DockerBuild(proto.Message): + r"""Build the source using Docker. This means the source has a + Dockerfile. + + """ + + class BuildpacksBuild(proto.Message): + r"""Build the source using Buildpacks. + + Attributes: + runtime (str): + The runtime name, e.g. 'go113'. Leave blank + for generic builds. + function_target (str): + Optional. Name of the function target if the + source is a function source. Required for + function builds. + cache_image_uri (str): + Optional. cache_image_uri is the GCR/AR URL where the cache + image will be stored. cache_image_uri is optional and + omitting it will disable caching. This URL must be stable + across builds. It is used to derive a build-specific + temporary URL by substituting the tag with the build ID. The + build will clean up the temporary image on a best-effort + basis. + base_image (str): + Optional. The base image used to opt into + automatic base image updates. + environment_variables (MutableMapping[str, str]): + Optional. User-provided build-time + environment variables. + enable_automatic_updates (bool): + Optional. Whether or not the application + container will be enrolled in automatic base + image updates. When true, the application will + be built on a scratch base image, so the base + layers can be appended at run time. 
+ """ + + runtime: str = proto.Field( + proto.STRING, + number=1, + ) + function_target: str = proto.Field( + proto.STRING, + number=2, + ) + cache_image_uri: str = proto.Field( + proto.STRING, + number=3, + ) + base_image: str = proto.Field( + proto.STRING, + number=4, + ) + environment_variables: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + enable_automatic_updates: bool = proto.Field( + proto.BOOL, + number=6, + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + storage_source: "StorageSource" = proto.Field( + proto.MESSAGE, + number=2, + oneof="source", + message="StorageSource", + ) + image_uri: str = proto.Field( + proto.STRING, + number=3, + ) + buildpack_build: BuildpacksBuild = proto.Field( + proto.MESSAGE, + number=4, + oneof="build_type", + message=BuildpacksBuild, + ) + docker_build: DockerBuild = proto.Field( + proto.MESSAGE, + number=5, + oneof="build_type", + message=DockerBuild, + ) + service_account: str = proto.Field( + proto.STRING, + number=6, + ) + worker_pool: str = proto.Field( + proto.STRING, + number=7, + ) + tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + + +class SubmitBuildResponse(proto.Message): + r"""Response message for submitting a Build. + + Attributes: + build_operation (google.longrunning.operations_pb2.Operation): + Cloud Build operation to be polled via + CloudBuild API. + base_image_uri (str): + URI of the base builder image in Artifact + Registry being used in the build. Used to opt + into automatic base image updates. + base_image_warning (str): + Warning message for the base image. 
+ """ + + build_operation: operations_pb2.Operation = proto.Field( + proto.MESSAGE, + number=1, + message=operations_pb2.Operation, + ) + base_image_uri: str = proto.Field( + proto.STRING, + number=2, + ) + base_image_warning: str = proto.Field( + proto.STRING, + number=3, + ) + + +class StorageSource(proto.Message): + r"""Location of the source in an archive file in Google Cloud + Storage. + + Attributes: + bucket (str): + Required. Google Cloud Storage bucket containing the source + (see `Bucket Name + Requirements `__). + object_ (str): + Required. Google Cloud Storage object containing the source. + + This object must be a gzipped archive file (``.tar.gz``) + containing source to build. + generation (int): + Optional. Google Cloud Storage generation for + the object. If the generation is omitted, the + latest generation will be used. + """ + + bucket: str = proto.Field( + proto.STRING, + number=1, + ) + object_: str = proto.Field( + proto.STRING, + number=2, + ) + generation: int = proto.Field( + proto.INT64, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-run/google/cloud/run_v2/types/revision.py b/packages/google-cloud-run/google/cloud/run_v2/types/revision.py index 1f25de97aed5..4119ad79a757 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/types/revision.py +++ b/packages/google-cloud-run/google/cloud/run_v2/types/revision.py @@ -248,6 +248,8 @@ class Revision(proto.Message): key (CMEK) to use to encrypt this container image. For more information, go to https://cloud.google.com/run/docs/securing/using-cmek + service_mesh (google.cloud.run_v2.types.ServiceMesh): + Enables service mesh connectivity. encryption_key_revocation_action (google.cloud.run_v2.types.EncryptionKeyRevocationAction): The action to take if the encryption key is revoked. @@ -280,6 +282,8 @@ class Revision(proto.Message): scaling_status (google.cloud.run_v2.types.RevisionScalingStatus): Output only. 
The current effective scaling settings for the revision. + node_selector (google.cloud.run_v2.types.NodeSelector): + The node selector for the revision. etag (str): Output only. A system-generated fingerprint for this version of the resource. May be used to @@ -379,6 +383,11 @@ class Revision(proto.Message): proto.STRING, number=21, ) + service_mesh: vendor_settings.ServiceMesh = proto.Field( + proto.MESSAGE, + number=22, + message=vendor_settings.ServiceMesh, + ) encryption_key_revocation_action: vendor_settings.EncryptionKeyRevocationAction = ( proto.Field( proto.ENUM, @@ -421,6 +430,11 @@ class Revision(proto.Message): number=39, message=status.RevisionScalingStatus, ) + node_selector: vendor_settings.NodeSelector = proto.Field( + proto.MESSAGE, + number=40, + message=vendor_settings.NodeSelector, + ) etag: str = proto.Field( proto.STRING, number=99, diff --git a/packages/google-cloud-run/google/cloud/run_v2/types/revision_template.py b/packages/google-cloud-run/google/cloud/run_v2/types/revision_template.py index ca94333ae35f..c2eda044f0df 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/types/revision_template.py +++ b/packages/google-cloud-run/google/cloud/run_v2/types/revision_template.py @@ -103,13 +103,20 @@ class RevisionTemplate(proto.Message): image. For more information, go to https://cloud.google.com/run/docs/securing/using-cmek max_instance_request_concurrency (int): - Optional. Sets the maximum number of requests - that each serving instance can receive. + Optional. Sets the maximum number of requests that each + serving instance can receive. If not specified or 0, + defaults to 80 when requested ``CPU >= 1`` and defaults to 1 + when requested ``CPU < 1``. + service_mesh (google.cloud.run_v2.types.ServiceMesh): + Optional. Enables service mesh connectivity. session_affinity (bool): Optional. Enable session affinity. health_check_disabled (bool): Optional. Disables health checking containers during deployment. 
+ node_selector (google.cloud.run_v2.types.NodeSelector): + Optional. The node selector for the revision + template. """ revision: str = proto.Field( @@ -168,6 +175,11 @@ class RevisionTemplate(proto.Message): proto.INT32, number=15, ) + service_mesh: vendor_settings.ServiceMesh = proto.Field( + proto.MESSAGE, + number=16, + message=vendor_settings.ServiceMesh, + ) session_affinity: bool = proto.Field( proto.BOOL, number=19, @@ -176,6 +188,11 @@ class RevisionTemplate(proto.Message): proto.BOOL, number=20, ) + node_selector: vendor_settings.NodeSelector = proto.Field( + proto.MESSAGE, + number=21, + message=vendor_settings.NodeSelector, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-run/google/cloud/run_v2/types/service.py b/packages/google-cloud-run/google/cloud/run_v2/types/service.py index 717c05138d3e..7b98547e4f7e 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/types/service.py +++ b/packages/google-cloud-run/google/cloud/run_v2/types/service.py @@ -358,9 +358,24 @@ class Service(proto.Message): scaling (google.cloud.run_v2.types.ServiceScaling): Optional. Specifies service-level scaling settings + invoker_iam_disabled (bool): + Optional. Disables IAM permission check for + run.routes.invoke for callers of this service. + This setting should not be used with external + ingress. default_uri_disabled (bool): Optional. Disables public resolution of the default URI of this service. + urls (MutableSequence[str]): + Output only. All URLs serving traffic for + this Service. + custom_audiences (MutableSequence[str]): + One or more custom audiences that you want + this service to support. Specify each custom + audience as the full URL in a string. The custom + audiences are encoded in the token and used to + authenticate requests. For more information, see + https://cloud.google.com/run/docs/configuring/custom-audiences. observed_generation (int): Output only. The generation of this Service currently serving traffic. 
See comments in ``reconciling`` for @@ -396,13 +411,6 @@ class Service(proto.Message): uri (str): Output only. The main URI in which this Service is serving traffic. - custom_audiences (MutableSequence[str]): - One or more custom audiences that you want - this service to support. Specify each custom - audience as the full URL in a string. The custom - audiences are encoded in the token and used to - authenticate requests. For more information, see - https://cloud.google.com/run/docs/configuring/custom-audiences. satisfies_pzs (bool): Output only. Reserved for future use. reconciling (bool): @@ -531,10 +539,22 @@ class Service(proto.Message): number=20, message=vendor_settings.ServiceScaling, ) + invoker_iam_disabled: bool = proto.Field( + proto.BOOL, + number=21, + ) default_uri_disabled: bool = proto.Field( proto.BOOL, number=22, ) + urls: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=24, + ) + custom_audiences: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=37, + ) observed_generation: int = proto.Field( proto.INT64, number=30, @@ -568,10 +588,6 @@ class Service(proto.Message): proto.STRING, number=36, ) - custom_audiences: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=37, - ) satisfies_pzs: bool = proto.Field( proto.BOOL, number=38, diff --git a/packages/google-cloud-run/google/cloud/run_v2/types/vendor_settings.py b/packages/google-cloud-run/google/cloud/run_v2/types/vendor_settings.py index 818261827ec0..6ac8d772f463 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/types/vendor_settings.py +++ b/packages/google-cloud-run/google/cloud/run_v2/types/vendor_settings.py @@ -28,7 +28,9 @@ "VpcAccess", "BinaryAuthorization", "RevisionScaling", + "ServiceMesh", "ServiceScaling", + "NodeSelector", }, ) @@ -46,11 +48,14 @@ class IngressTraffic(proto.Enum): INGRESS_TRAFFIC_INTERNAL_LOAD_BALANCER (3): Both internal and Google Cloud Load Balancer traffic is allowed. 
+ INGRESS_TRAFFIC_NONE (4): + No ingress traffic is allowed. """ INGRESS_TRAFFIC_UNSPECIFIED = 0 INGRESS_TRAFFIC_ALL = 1 INGRESS_TRAFFIC_INTERNAL_ONLY = 2 INGRESS_TRAFFIC_INTERNAL_LOAD_BALANCER = 3 + INGRESS_TRAFFIC_NONE = 4 class ExecutionEnvironment(proto.Enum): @@ -94,12 +99,11 @@ class VpcAccess(proto.Message): Attributes: connector (str): - VPC Access connector name. - Format: - projects/{project}/locations/{location}/connectors/{connector}, - where {project} can be project id or number. - For more information on sending traffic to a VPC - network via a connector, visit + VPC Access connector name. Format: + ``projects/{project}/locations/{location}/connectors/{connector}``, + where ``{project}`` can be project id or number. For more + information on sending traffic to a VPC network via a + connector, visit https://cloud.google.com/run/docs/configuring/vpc-connectors. egress (google.cloud.run_v2.types.VpcAccess.VpcEgress): Optional. Traffic VPC egress settings. If not provided, it @@ -201,9 +205,8 @@ class BinaryAuthorization(proto.Message): This field is a member of `oneof`_ ``binauthz_method``. policy (str): - Optional. The path to a binary authorization - policy. Format: - projects/{project}/platforms/cloudRun/{policy-name} + Optional. The path to a binary authorization policy. Format: + ``projects/{project}/platforms/cloudRun/{policy-name}`` This field is a member of `oneof`_ ``binauthz_method``. breakglass_justification (str): @@ -238,7 +241,11 @@ class RevisionScaling(proto.Message): that this resource should have. max_instance_count (int): Optional. Maximum number of serving instances - that this resource should have. + that this resource should have. When + unspecified, the field is set to the server + default value of + 100. 
For more information see + https://cloud.google.com/run/docs/configuring/max-instances """ min_instance_count: int = proto.Field( @@ -251,6 +258,23 @@ class RevisionScaling(proto.Message): ) +class ServiceMesh(proto.Message): + r"""Settings for Cloud Service Mesh. For more information see + https://cloud.google.com/service-mesh/docs/overview. + + Attributes: + mesh (str): + The Mesh resource name. Format: + ``projects/{project}/locations/global/meshes/{mesh}``, where + ``{project}`` can be project id or number. + """ + + mesh: str = proto.Field( + proto.STRING, + number=1, + ) + + class ServiceScaling(proto.Message): r"""Scaling settings applied at the service level rather than at the revision level. @@ -261,13 +285,52 @@ class ServiceScaling(proto.Message): service. This number of instances is divided among all revisions with specified traffic based on the percent of traffic they are receiving. - (BETA) + scaling_mode (google.cloud.run_v2.types.ServiceScaling.ScalingMode): + Optional. The scaling mode for the service. """ + class ScalingMode(proto.Enum): + r"""The scaling mode for the service. If not provided, it + defaults to AUTOMATIC. + + Values: + SCALING_MODE_UNSPECIFIED (0): + Unspecified. + AUTOMATIC (1): + Scale based on traffic between min and max + instances. + MANUAL (2): + Scale to exactly min instances and ignore max + instances. + """ + SCALING_MODE_UNSPECIFIED = 0 + AUTOMATIC = 1 + MANUAL = 2 + min_instance_count: int = proto.Field( proto.INT32, number=1, ) + scaling_mode: ScalingMode = proto.Field( + proto.ENUM, + number=3, + enum=ScalingMode, + ) + + +class NodeSelector(proto.Message): + r"""Hardware constraints configuration. + + Attributes: + accelerator (str): + Required. GPU accelerator type to attach to + an instance. 
+ """ + + accelerator: str = proto.Field( + proto.STRING, + number=1, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-run/samples/generated_samples/run_v2_generated_builds_submit_build_async.py b/packages/google-cloud-run/samples/generated_samples/run_v2_generated_builds_submit_build_async.py new file mode 100644 index 000000000000..a5a58398f493 --- /dev/null +++ b/packages/google-cloud-run/samples/generated_samples/run_v2_generated_builds_submit_build_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SubmitBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-run + + +# [START run_v2_generated_Builds_SubmitBuild_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import run_v2 + + +async def sample_submit_build(): + # Create a client + client = run_v2.BuildsAsyncClient() + + # Initialize request argument(s) + storage_source = run_v2.StorageSource() + storage_source.bucket = "bucket_value" + storage_source.object_ = "object__value" + + request = run_v2.SubmitBuildRequest( + storage_source=storage_source, + parent="parent_value", + image_uri="image_uri_value", + ) + + # Make the request + response = await client.submit_build(request=request) + + # Handle the response + print(response) + +# [END run_v2_generated_Builds_SubmitBuild_async] diff --git a/packages/google-cloud-run/samples/generated_samples/run_v2_generated_builds_submit_build_sync.py b/packages/google-cloud-run/samples/generated_samples/run_v2_generated_builds_submit_build_sync.py new file mode 100644 index 000000000000..17d11866d645 --- /dev/null +++ b/packages/google-cloud-run/samples/generated_samples/run_v2_generated_builds_submit_build_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SubmitBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-run + + +# [START run_v2_generated_Builds_SubmitBuild_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import run_v2 + + +def sample_submit_build(): + # Create a client + client = run_v2.BuildsClient() + + # Initialize request argument(s) + storage_source = run_v2.StorageSource() + storage_source.bucket = "bucket_value" + storage_source.object_ = "object__value" + + request = run_v2.SubmitBuildRequest( + storage_source=storage_source, + parent="parent_value", + image_uri="image_uri_value", + ) + + # Make the request + response = client.submit_build(request=request) + + # Handle the response + print(response) + +# [END run_v2_generated_Builds_SubmitBuild_sync] diff --git a/packages/google-cloud-run/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json b/packages/google-cloud-run/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json index ae607aeec9de..84a8ca294789 100644 --- a/packages/google-cloud-run/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json +++ b/packages/google-cloud-run/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json @@ -11,6 +11,159 @@ "version": "0.1.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.run_v2.BuildsAsyncClient", + "shortName": "BuildsAsyncClient" + }, + "fullName": "google.cloud.run_v2.BuildsAsyncClient.submit_build", + "method": { + "fullName": "google.cloud.run.v2.Builds.SubmitBuild", + "service": { + "fullName": 
"google.cloud.run.v2.Builds", + "shortName": "Builds" + }, + "shortName": "SubmitBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.run_v2.types.SubmitBuildRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.run_v2.types.SubmitBuildResponse", + "shortName": "submit_build" + }, + "description": "Sample for SubmitBuild", + "file": "run_v2_generated_builds_submit_build_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "run_v2_generated_Builds_SubmitBuild_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "run_v2_generated_builds_submit_build_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.run_v2.BuildsClient", + "shortName": "BuildsClient" + }, + "fullName": "google.cloud.run_v2.BuildsClient.submit_build", + "method": { + "fullName": "google.cloud.run.v2.Builds.SubmitBuild", + "service": { + "fullName": "google.cloud.run.v2.Builds", + "shortName": "Builds" + }, + "shortName": "SubmitBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.run_v2.types.SubmitBuildRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.run_v2.types.SubmitBuildResponse", + "shortName": "submit_build" + }, + "description": "Sample for 
SubmitBuild", + "file": "run_v2_generated_builds_submit_build_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "run_v2_generated_Builds_SubmitBuild_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "run_v2_generated_builds_submit_build_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-run/scripts/fixup_run_v2_keywords.py b/packages/google-cloud-run/scripts/fixup_run_v2_keywords.py index 2b9c966dccff..84f0f0b13518 100644 --- a/packages/google-cloud-run/scripts/fixup_run_v2_keywords.py +++ b/packages/google-cloud-run/scripts/fixup_run_v2_keywords.py @@ -59,6 +59,7 @@ class runCallTransformer(cst.CSTTransformer): 'list_tasks': ('parent', 'page_size', 'page_token', 'show_deleted', ), 'run_job': ('name', 'validate_only', 'etag', 'overrides', ), 'set_iam_policy': ('resource', 'policy', 'update_mask', ), + 'submit_build': ('parent', 'storage_source', 'image_uri', 'buildpack_build', 'docker_build', 'service_account', 'worker_pool', 'tags', ), 'test_iam_permissions': ('resource', 'permissions', ), 'update_job': ('job', 'validate_only', 'allow_missing', ), 'update_service': ('service', 'update_mask', 'validate_only', 'allow_missing', ), diff --git a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_builds.py b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_builds.py new file mode 100644 index 000000000000..dd13d86db489 --- /dev/null +++ b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_builds.py @@ -0,0 +1,3162 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 
(the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.run_v2.services.builds import ( + BuildsAsyncClient, + BuildsClient, + transports, +) +from google.cloud.run_v2.types import build + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert BuildsClient._get_default_mtls_endpoint(None) is None + assert BuildsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ( + BuildsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + ) + assert ( + BuildsClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + BuildsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert BuildsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert BuildsClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert BuildsClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert BuildsClient._read_environment_variables() == (False, "auto", None) + + with 
mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
    ):
        with pytest.raises(ValueError) as excinfo:
            BuildsClient._read_environment_variables()
    assert (
        str(excinfo.value)
        == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
    )

    # NOTE(review): the lines above continue test__read_environment_variables,
    # whose `def` begins before this view — the remaining cases below exercise
    # the GOOGLE_API_USE_MTLS_ENDPOINT and GOOGLE_CLOUD_UNIVERSE_DOMAIN values.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        assert BuildsClient._read_environment_variables() == (False, "never", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        assert BuildsClient._read_environment_variables() == (False, "always", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
        assert BuildsClient._read_environment_variables() == (False, "auto", None)

    # Unsupported GOOGLE_API_USE_MTLS_ENDPOINT values must raise, with an exact message.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError) as excinfo:
            BuildsClient._read_environment_variables()
    assert (
        str(excinfo.value)
        == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
    )

    # GOOGLE_CLOUD_UNIVERSE_DOMAIN is passed through as the third tuple element.
    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
        assert BuildsClient._read_environment_variables() == (False, "auto", "foo.com")


def test__get_client_cert_source():
    """Verify BuildsClient._get_client_cert_source only returns a cert source
    when client certificates are enabled, preferring a provided source over
    the ADC default source."""
    mock_provided_cert_source = mock.Mock()
    mock_default_cert_source = mock.Mock()

    # With use_cert flag False, no cert source is returned even if provided.
    assert BuildsClient._get_client_cert_source(None, False) is None
    assert (
        BuildsClient._get_client_cert_source(mock_provided_cert_source, False) is None
    )
    assert (
        BuildsClient._get_client_cert_source(mock_provided_cert_source, True)
        == mock_provided_cert_source
    )

    # When no source is provided but ADC has a default one, the default is used.
    with mock.patch(
        "google.auth.transport.mtls.has_default_client_cert_source", return_value=True
    ):
        with mock.patch(
            "google.auth.transport.mtls.default_client_cert_source",
            return_value=mock_default_cert_source,
        ):
            assert (
                BuildsClient._get_client_cert_source(None, True)
                is mock_default_cert_source
            )
            assert (
                BuildsClient._get_client_cert_source(mock_provided_cert_source, "true")
                is mock_provided_cert_source
            )


@mock.patch.object(
    BuildsClient,
    "_DEFAULT_ENDPOINT_TEMPLATE",
    modify_default_endpoint_template(BuildsClient),
)
@mock.patch.object(
    BuildsAsyncClient,
    "_DEFAULT_ENDPOINT_TEMPLATE",
    modify_default_endpoint_template(BuildsAsyncClient),
)
def test__get_api_endpoint():
    """Verify BuildsClient._get_api_endpoint resolution order: explicit
    override > mTLS endpoint (when cert present / "always") > universe-domain
    templated endpoint; mTLS outside the default universe must raise."""
    api_override = "foo.com"
    mock_client_cert_source = mock.Mock()
    default_universe = BuildsClient._DEFAULT_UNIVERSE
    default_endpoint = BuildsClient._DEFAULT_ENDPOINT_TEMPLATE.format(
        UNIVERSE_DOMAIN=default_universe
    )
    mock_universe = "bar.com"
    mock_endpoint = BuildsClient._DEFAULT_ENDPOINT_TEMPLATE.format(
        UNIVERSE_DOMAIN=mock_universe
    )

    # An explicit api override always wins.
    assert (
        BuildsClient._get_api_endpoint(
            api_override, mock_client_cert_source, default_universe, "always"
        )
        == api_override
    )
    assert (
        BuildsClient._get_api_endpoint(
            None, mock_client_cert_source, default_universe, "auto"
        )
        == BuildsClient.DEFAULT_MTLS_ENDPOINT
    )
    assert (
        BuildsClient._get_api_endpoint(None, None, default_universe, "auto")
        == default_endpoint
    )
    assert (
        BuildsClient._get_api_endpoint(None, None, default_universe, "always")
        == BuildsClient.DEFAULT_MTLS_ENDPOINT
    )
    assert (
        BuildsClient._get_api_endpoint(
            None, mock_client_cert_source, default_universe, "always"
        )
        == BuildsClient.DEFAULT_MTLS_ENDPOINT
    )
    assert (
        BuildsClient._get_api_endpoint(None, None, mock_universe, "never")
        == mock_endpoint
    )
    assert (
        BuildsClient._get_api_endpoint(None, None, default_universe, "never")
        == default_endpoint
    )

    # mTLS is only supported in the default (googleapis.com) universe.
    with pytest.raises(MutualTLSChannelError) as excinfo:
        BuildsClient._get_api_endpoint(
            None, mock_client_cert_source, mock_universe, "auto"
        )
    assert (
        str(excinfo.value)
        == "mTLS is not supported in any universe other than googleapis.com."
    )


def test__get_universe_domain():
    """Verify BuildsClient._get_universe_domain precedence: client option >
    environment variable > default universe; empty string is rejected."""
    client_universe_domain = "foo.com"
    universe_domain_env = "bar.com"

    assert (
        BuildsClient._get_universe_domain(client_universe_domain, universe_domain_env)
        == client_universe_domain
    )
    assert (
        BuildsClient._get_universe_domain(None, universe_domain_env)
        == universe_domain_env
    )
    assert (
        BuildsClient._get_universe_domain(None, None) == BuildsClient._DEFAULT_UNIVERSE
    )

    with pytest.raises(ValueError) as excinfo:
        BuildsClient._get_universe_domain("", None)
    assert str(excinfo.value) == "Universe Domain cannot be an empty string."


@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (BuildsClient, transports.BuildsGrpcTransport, "grpc"),
        (BuildsClient, transports.BuildsRestTransport, "rest"),
    ],
)
def test__validate_universe_domain(client_class, transport_class, transport_name):
    """Verify client._validate_universe_domain for matching, already-validated,
    channel-provided, and missing-credential scenarios (continues below)."""
    client = client_class(
        transport=transport_class(credentials=ga_credentials.AnonymousCredentials())
    )
    assert client._validate_universe_domain() == True

    # Test the case when universe is already validated.
    assert client._validate_universe_domain() == True

    if transport_name == "grpc":
        # Test the case where credentials are provided by the
        # `local_channel_credentials`. The default universes in both match.
        channel = grpc.secure_channel(
            "http://localhost/", grpc.local_channel_credentials()
        )
        client = client_class(transport=transport_class(channel=channel))
        assert client._validate_universe_domain() == True

        # Test the case where credentials do not exist: e.g. a transport is provided
        # with no credentials. Validation should still succeed because there is no
        # mismatch with non-existent credentials.
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
    # (continuation of test__validate_universe_domain)
    api_core_major, api_core_minor = [
        int(part) for part in api_core_version.__version__.split(".")[0:2]
    ]
    if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15):
        client = client_class(
            client_options={"universe_domain": "bar.com"},
            transport=transport_class(
                credentials=ga_credentials.AnonymousCredentials(),
            ),
        )
        with pytest.raises(ValueError) as excinfo:
            client._validate_universe_domain()
        assert (
            str(excinfo.value)
            == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
        )

    # Test that ValueError is raised if universe_domain is provided via client options and credentials is None
    with pytest.raises(ValueError):
        client._compare_universes("foo.bar", None)


@pytest.mark.parametrize(
    "client_class,transport_name",
    [
        (BuildsClient, "grpc"),
        (BuildsAsyncClient, "grpc_asyncio"),
        (BuildsClient, "rest"),
    ],
)
def test_builds_client_from_service_account_info(client_class, transport_name):
    """Verify from_service_account_info builds a client carrying the factory's
    credentials and the transport-appropriate default host."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_info"
    ) as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info, transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            "run.googleapis.com:443"
            if transport_name in ["grpc", "grpc_asyncio"]
            else "https://run.googleapis.com"
        )


@pytest.mark.parametrize(
    "transport_class,transport_name",
    [
        (transports.BuildsGrpcTransport, "grpc"),
        (transports.BuildsGrpcAsyncIOTransport, "grpc_asyncio"),
        (transports.BuildsRestTransport, "rest"),
    ],
)
def test_builds_client_service_account_always_use_jwt(transport_class, transport_name):
    """Verify the transport toggles self-signed JWT usage on service-account
    credentials only when always_use_jwt_access=True."""
    with mock.patch.object(
        service_account.Credentials, "with_always_use_jwt_access", create=True
    ) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)

    with mock.patch.object(
        service_account.Credentials, "with_always_use_jwt_access", create=True
    ) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()


@pytest.mark.parametrize(
    "client_class,transport_name",
    [
        (BuildsClient, "grpc"),
        (BuildsAsyncClient, "grpc_asyncio"),
        (BuildsClient, "rest"),
    ],
)
def test_builds_client_from_service_account_file(client_class, transport_name):
    """Verify from_service_account_file / from_service_account_json build
    clients carrying the factory's credentials and the correct default host."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_file"
    ) as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file(
            "dummy/file/path.json", transport=transport_name
        )
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        client = client_class.from_service_account_json(
            "dummy/file/path.json", transport=transport_name
        )
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            "run.googleapis.com:443"
            if transport_name in ["grpc", "grpc_asyncio"]
            else "https://run.googleapis.com"
        )


def test_builds_client_get_transport_class():
    """Verify get_transport_class returns a supported transport, and the
    'grpc' name maps to the gRPC transport."""
    transport = BuildsClient.get_transport_class()
    available_transports = [
        transports.BuildsGrpcTransport,
        transports.BuildsRestTransport,
    ]
    assert transport in available_transports

    transport = BuildsClient.get_transport_class("grpc")
    assert transport == transports.BuildsGrpcTransport


@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (BuildsClient, transports.BuildsGrpcTransport, "grpc"),
        (BuildsAsyncClient, transports.BuildsGrpcAsyncIOTransport, "grpc_asyncio"),
        (BuildsClient, transports.BuildsRestTransport, "rest"),
    ],
)
@mock.patch.object(
    BuildsClient,
    "_DEFAULT_ENDPOINT_TEMPLATE",
    modify_default_endpoint_template(BuildsClient),
)
@mock.patch.object(
    BuildsAsyncClient,
    "_DEFAULT_ENDPOINT_TEMPLATE",
    modify_default_endpoint_template(BuildsAsyncClient),
)
def test_builds_client_client_options(client_class, transport_class, transport_name):
    """Verify how client_options and mTLS environment variables drive the
    arguments the client passes to its transport constructor (continues below)."""
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(BuildsClient, "get_transport_class") as gtc:
        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(BuildsClient, "get_transport_class") as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
    # (continuation of test_builds_client_client_options)
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
    ):
        with pytest.raises(ValueError) as excinfo:
            client = client_class(transport=transport_name)
        assert (
            str(excinfo.value)
            == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
        )

    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
            ),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
    # Check the case api_endpoint is provided
    options = client_options.ClientOptions(
        api_audience="https://language.googleapis.com"
    )
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
            ),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience="https://language.googleapis.com",
        )


@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,use_client_cert_env",
    [
        (BuildsClient, transports.BuildsGrpcTransport, "grpc", "true"),
        (
            BuildsAsyncClient,
            transports.BuildsGrpcAsyncIOTransport,
            "grpc_asyncio",
            "true",
        ),
        (BuildsClient, transports.BuildsGrpcTransport, "grpc", "false"),
        (
            BuildsAsyncClient,
            transports.BuildsGrpcAsyncIOTransport,
            "grpc_asyncio",
            "false",
        ),
        (BuildsClient, transports.BuildsRestTransport, "rest", "true"),
        (BuildsClient, transports.BuildsRestTransport, "rest", "false"),
    ],
)
@mock.patch.object(
    BuildsClient,
    "_DEFAULT_ENDPOINT_TEMPLATE",
    modify_default_endpoint_template(BuildsClient),
)
@mock.patch.object(
    BuildsAsyncClient,
    "_DEFAULT_ENDPOINT_TEMPLATE",
    modify_default_endpoint_template(BuildsAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_builds_client_mtls_env_auto(
    client_class, transport_class, transport_name, use_client_cert_env
):
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [BuildsClient, BuildsAsyncClient]) +@mock.patch.object( + BuildsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BuildsClient) +) +@mock.patch.object( + BuildsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BuildsAsyncClient) +) +def test_builds_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [BuildsClient, BuildsAsyncClient]) +@mock.patch.object( + BuildsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BuildsClient), +) +@mock.patch.object( + BuildsAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BuildsAsyncClient), +) +def test_builds_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = BuildsClient._DEFAULT_UNIVERSE + default_endpoint = BuildsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = BuildsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (BuildsClient, transports.BuildsGrpcTransport, "grpc"), + (BuildsAsyncClient, transports.BuildsGrpcAsyncIOTransport, "grpc_asyncio"), + (BuildsClient, transports.BuildsRestTransport, "rest"), + ], +) +def test_builds_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (BuildsClient, transports.BuildsGrpcTransport, "grpc", grpc_helpers), + ( + BuildsAsyncClient, + transports.BuildsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (BuildsClient, transports.BuildsRestTransport, "rest", None), + ], +) +def test_builds_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_builds_client_client_options_from_dict(): + with mock.patch( + "google.cloud.run_v2.services.builds.transports.BuildsGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = BuildsClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (BuildsClient, transports.BuildsGrpcTransport, "grpc", grpc_helpers), + ( + BuildsAsyncClient, + transports.BuildsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_builds_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
    # (continuation of test_builds_client_create_channel_credentials_file)
    options = client_options.ClientOptions(credentials_file="credentials.json")

    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
            ),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # test that the credentials from file are saved and used as the credentials.
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel"
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        file_creds = ga_credentials.AnonymousCredentials()
        load_creds.return_value = (file_creds, None)
        adc.return_value = (creds, None)
        client = client_class(client_options=options, transport=transport_name)
        create_channel.assert_called_with(
            "run.googleapis.com:443",
            credentials=file_creds,
            credentials_file=None,
            quota_project_id=None,
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            scopes=None,
            default_host="run.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )


@pytest.mark.parametrize(
    "request_type",
    [
        build.SubmitBuildRequest,
        dict,
    ],
)
def test_submit_build(request_type, transport: str = "grpc"):
    """Verify submit_build forwards the request to the gRPC stub and returns
    the stub's SubmitBuildResponse fields unchanged."""
    client = BuildsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.submit_build), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = build.SubmitBuildResponse(
            base_image_uri="base_image_uri_value",
            base_image_warning="base_image_warning_value",
        )
        response = client.submit_build(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        request = build.SubmitBuildRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, build.SubmitBuildResponse)
    assert response.base_image_uri == "base_image_uri_value"
    assert response.base_image_warning == "base_image_warning_value"


def test_submit_build_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = BuildsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.submit_build), "__call__") as call:
        call.return_value.name = (
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
        client.submit_build()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == build.SubmitBuildRequest()


def test_submit_build_non_empty_request_with_auto_populated_field():
    # This test is a coverage failsafe to make sure that UUID4 fields are
    # automatically populated, according to AIP-4235, with non-empty requests.
+ client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = build.SubmitBuildRequest( + parent="parent_value", + image_uri="image_uri_value", + service_account="service_account_value", + worker_pool="worker_pool_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_build), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.submit_build(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == build.SubmitBuildRequest( + parent="parent_value", + image_uri="image_uri_value", + service_account="service_account_value", + worker_pool="worker_pool_value", + ) + + +def test_submit_build_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.submit_build in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.submit_build] = mock_rpc + request = {} + client.submit_build(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.submit_build(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_submit_build_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_build), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + build.SubmitBuildResponse( + base_image_uri="base_image_uri_value", + base_image_warning="base_image_warning_value", + ) + ) + response = await client.submit_build() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == build.SubmitBuildRequest() + + +@pytest.mark.asyncio +async def test_submit_build_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.submit_build + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.submit_build + ] = mock_rpc + + request = {} 
+ await client.submit_build(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.submit_build(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_submit_build_async( + transport: str = "grpc_asyncio", request_type=build.SubmitBuildRequest +): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_build), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + build.SubmitBuildResponse( + base_image_uri="base_image_uri_value", + base_image_warning="base_image_warning_value", + ) + ) + response = await client.submit_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = build.SubmitBuildRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, build.SubmitBuildResponse) + assert response.base_image_uri == "base_image_uri_value" + assert response.base_image_warning == "base_image_warning_value" + + +@pytest.mark.asyncio +async def test_submit_build_async_from_dict(): + await test_submit_build_async(request_type=dict) + + +def test_submit_build_field_headers(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = build.SubmitBuildRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_build), "__call__") as call: + call.return_value = build.SubmitBuildResponse() + client.submit_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_submit_build_field_headers_async(): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = build.SubmitBuildRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_build), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + build.SubmitBuildResponse() + ) + await client.submit_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + build.SubmitBuildRequest, + dict, + ], +) +def test_submit_build_rest(request_type): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = build.SubmitBuildResponse( + base_image_uri="base_image_uri_value", + base_image_warning="base_image_warning_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = build.SubmitBuildResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.submit_build(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, build.SubmitBuildResponse) + assert response.base_image_uri == "base_image_uri_value" + assert response.base_image_warning == "base_image_warning_value" + + +def test_submit_build_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.submit_build in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.submit_build] = mock_rpc + + request = {} + client.submit_build(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.submit_build(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_submit_build_rest_required_fields(request_type=build.SubmitBuildRequest): + transport_class = transports.BuildsRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["image_uri"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).submit_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["imageUri"] = "image_uri_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).submit_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "imageUri" in jsonified_request + assert jsonified_request["imageUri"] == "image_uri_value" + + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = build.SubmitBuildResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = build.SubmitBuildResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.submit_build(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_submit_build_rest_unset_required_fields(): + transport = transports.BuildsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.submit_build._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "storageSource", + "imageUri", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_submit_build_rest_interceptors(null_interceptor): + transport = transports.BuildsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BuildsRestInterceptor(), + ) + client = BuildsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) 
as transcode, mock.patch.object( + transports.BuildsRestInterceptor, "post_submit_build" + ) as post, mock.patch.object( + transports.BuildsRestInterceptor, "pre_submit_build" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = build.SubmitBuildRequest.pb(build.SubmitBuildRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = build.SubmitBuildResponse.to_json( + build.SubmitBuildResponse() + ) + + request = build.SubmitBuildRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = build.SubmitBuildResponse() + + client.submit_build( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_submit_build_rest_bad_request( + transport: str = "rest", request_type=build.SubmitBuildRequest +): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.submit_build(request) + + +def test_submit_build_rest_error(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.BuildsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.BuildsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BuildsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.BuildsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BuildsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BuildsClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.BuildsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BuildsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.BuildsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = BuildsClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.BuildsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.BuildsGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BuildsGrpcTransport, + transports.BuildsGrpcAsyncIOTransport, + transports.BuildsRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = BuildsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.BuildsGrpcTransport, + ) + + +def test_builds_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.BuildsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_builds_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.run_v2.services.builds.transports.BuildsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.BuildsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "submit_build", + "get_operation", + "wait_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_builds_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.run_v2.services.builds.transports.BuildsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BuildsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + 
scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_builds_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.run_v2.services.builds.transports.BuildsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BuildsTransport() + adc.assert_called_once() + + +def test_builds_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + BuildsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BuildsGrpcTransport, + transports.BuildsGrpcAsyncIOTransport, + ], +) +def test_builds_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BuildsGrpcTransport, + transports.BuildsGrpcAsyncIOTransport, + transports.BuildsRestTransport, + ], +) +def test_builds_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.BuildsGrpcTransport, grpc_helpers), + (transports.BuildsGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_builds_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "run.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="run.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.BuildsGrpcTransport, transports.BuildsGrpcAsyncIOTransport], +) +def test_builds_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_builds_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.BuildsRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_builds_host_no_port(transport_name): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint="run.googleapis.com"), + transport=transport_name, + ) + assert client.transport._host == ( + "run.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://run.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_builds_host_with_port(transport_name): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="run.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "run.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://run.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def 
test_builds_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = BuildsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = BuildsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.submit_build._session + session2 = client2.transport.submit_build._session + assert session1 != session2 + + +def test_builds_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.BuildsGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_builds_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.BuildsGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [transports.BuildsGrpcTransport, transports.BuildsGrpcAsyncIOTransport], +) +def test_builds_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [transports.BuildsGrpcTransport, transports.BuildsGrpcAsyncIOTransport], +) +def test_builds_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_worker_pool_path(): + project = "squid" + location = "clam" + worker_pool = "whelk" + expected = ( + "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( + project=project, + location=location, + worker_pool=worker_pool, + ) + ) + actual = BuildsClient.worker_pool_path(project, location, worker_pool) + assert expected == actual + + +def test_parse_worker_pool_path(): + expected = { + "project": "octopus", + "location": "oyster", + "worker_pool": "nudibranch", + } + path = BuildsClient.worker_pool_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BuildsClient.parse_worker_pool_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = BuildsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = BuildsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = BuildsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = BuildsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = BuildsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = BuildsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = BuildsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = BuildsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BuildsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = BuildsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = BuildsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = BuildsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = BuildsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = BuildsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BuildsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.BuildsTransport, "_prep_wrapped_messages" + ) as prep: + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.BuildsTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = BuildsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_wait_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.WaitOperationRequest +): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.wait_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.WaitOperationRequest, + dict, + ], +) +def test_wait_operation_rest(request_type): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.wait_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_delete_operation(transport: str = "grpc"): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_wait_operation(transport: str = "grpc"): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.WaitOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.wait_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_wait_operation(transport: str = "grpc_asyncio"): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.WaitOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.wait_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_wait_operation_field_headers(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.WaitOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.wait_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_wait_operation_field_headers_async(): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.WaitOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.wait_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_wait_operation_from_dict(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.wait_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_wait_operation_from_dict_async(): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.wait_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (BuildsClient, transports.BuildsGrpcTransport), + (BuildsAsyncClient, transports.BuildsGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + 
client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_jobs.py b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_jobs.py index 11755c4fb3c4..7d6702058641 100644 --- a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_jobs.py +++ b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_jobs.py @@ -8029,9 +8029,29 @@ def test_parse_job_path(): assert expected == actual -def test_secret_path(): +def test_policy_path(): project = "oyster" - secret = "nudibranch" + expected = "projects/{project}/policy".format( + project=project, + ) + actual = JobsClient.policy_path(project) + assert expected == actual + + +def test_parse_policy_path(): + expected = { + "project": "nudibranch", + } + path = JobsClient.policy_path(**expected) + + # Check that the path construction is reversible. + actual = JobsClient.parse_policy_path(path) + assert expected == actual + + +def test_secret_path(): + project = "cuttlefish" + secret = "mussel" expected = "projects/{project}/secrets/{secret}".format( project=project, secret=secret, @@ -8042,8 +8062,8 @@ def test_secret_path(): def test_parse_secret_path(): expected = { - "project": "cuttlefish", - "secret": "mussel", + "project": "winkle", + "secret": "nautilus", } path = JobsClient.secret_path(**expected) @@ -8053,9 +8073,9 @@ def test_parse_secret_path(): def test_secret_version_path(): - project = "winkle" - secret = "nautilus" - version = "scallop" + project = "scallop" + secret = "abalone" + version = "squid" expected = "projects/{project}/secrets/{secret}/versions/{version}".format( project=project, secret=secret, @@ -8067,9 +8087,9 @@ def test_secret_version_path(): def test_parse_secret_version_path(): expected = { - "project": "abalone", - "secret": "squid", - "version": "clam", + "project": "clam", + "secret": "whelk", + "version": "octopus", } path = JobsClient.secret_version_path(**expected) @@ -8079,7 
+8099,7 @@ def test_parse_secret_version_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -8089,7 +8109,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "nudibranch", } path = JobsClient.common_billing_account_path(**expected) @@ -8099,7 +8119,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "cuttlefish" expected = "folders/{folder}".format( folder=folder, ) @@ -8109,7 +8129,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "mussel", } path = JobsClient.common_folder_path(**expected) @@ -8119,7 +8139,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "winkle" expected = "organizations/{organization}".format( organization=organization, ) @@ -8129,7 +8149,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "nautilus", } path = JobsClient.common_organization_path(**expected) @@ -8139,7 +8159,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "scallop" expected = "projects/{project}".format( project=project, ) @@ -8149,7 +8169,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "abalone", } path = JobsClient.common_project_path(**expected) @@ -8159,8 +8179,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "squid" + location = "clam" expected = "projects/{project}/locations/{location}".format( 
project=project, location=location, @@ -8171,8 +8191,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "whelk", + "location": "octopus", } path = JobsClient.common_location_path(**expected) diff --git a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_revisions.py b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_revisions.py index e01cb1c29f93..04d6b89fa111 100644 --- a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_revisions.py +++ b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_revisions.py @@ -4031,11 +4031,37 @@ def test_parse_crypto_key_path(): assert expected == actual -def test_revision_path(): +def test_mesh_path(): project = "whelk" location = "octopus" - service = "oyster" - revision = "nudibranch" + mesh = "oyster" + expected = "projects/{project}/locations/{location}/meshes/{mesh}".format( + project=project, + location=location, + mesh=mesh, + ) + actual = RevisionsClient.mesh_path(project, location, mesh) + assert expected == actual + + +def test_parse_mesh_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "mesh": "mussel", + } + path = RevisionsClient.mesh_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RevisionsClient.parse_mesh_path(path) + assert expected == actual + + +def test_revision_path(): + project = "winkle" + location = "nautilus" + service = "scallop" + revision = "abalone" expected = "projects/{project}/locations/{location}/services/{service}/revisions/{revision}".format( project=project, location=location, @@ -4048,10 +4074,10 @@ def test_revision_path(): def test_parse_revision_path(): expected = { - "project": "cuttlefish", - "location": "mussel", - "service": "winkle", - "revision": "nautilus", + "project": "squid", + "location": "clam", + "service": "whelk", + "revision": "octopus", } path = RevisionsClient.revision_path(**expected) @@ -4061,8 +4087,8 @@ def test_parse_revision_path(): def test_secret_path(): - project = "scallop" - secret = "abalone" + project = "oyster" + secret = "nudibranch" expected = "projects/{project}/secrets/{secret}".format( project=project, secret=secret, @@ -4073,8 +4099,8 @@ def test_secret_path(): def test_parse_secret_path(): expected = { - "project": "squid", - "secret": "clam", + "project": "cuttlefish", + "secret": "mussel", } path = RevisionsClient.secret_path(**expected) @@ -4084,9 +4110,9 @@ def test_parse_secret_path(): def test_secret_version_path(): - project = "whelk" - secret = "octopus" - version = "oyster" + project = "winkle" + secret = "nautilus" + version = "scallop" expected = "projects/{project}/secrets/{secret}/versions/{version}".format( project=project, secret=secret, @@ -4098,9 +4124,9 @@ def test_secret_version_path(): def test_parse_secret_version_path(): expected = { - "project": "nudibranch", - "secret": "cuttlefish", - "version": "mussel", + "project": "abalone", + "secret": "squid", + "version": "clam", } path = RevisionsClient.secret_version_path(**expected) @@ -4110,9 +4136,9 @@ def test_parse_secret_version_path(): def test_service_path(): - project = "winkle" - location = "nautilus" - service = "scallop" + project = "whelk" + location = "octopus" + service = "oyster" 
expected = "projects/{project}/locations/{location}/services/{service}".format( project=project, location=location, @@ -4124,9 +4150,9 @@ def test_service_path(): def test_parse_service_path(): expected = { - "project": "abalone", - "location": "squid", - "service": "clam", + "project": "nudibranch", + "location": "cuttlefish", + "service": "mussel", } path = RevisionsClient.service_path(**expected) @@ -4136,7 +4162,7 @@ def test_parse_service_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "winkle" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -4146,7 +4172,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "nautilus", } path = RevisionsClient.common_billing_account_path(**expected) @@ -4156,7 +4182,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "scallop" expected = "folders/{folder}".format( folder=folder, ) @@ -4166,7 +4192,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "abalone", } path = RevisionsClient.common_folder_path(**expected) @@ -4176,7 +4202,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "squid" expected = "organizations/{organization}".format( organization=organization, ) @@ -4186,7 +4212,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "clam", } path = RevisionsClient.common_organization_path(**expected) @@ -4196,7 +4222,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "whelk" expected = "projects/{project}".format( project=project, ) @@ -4206,7 +4232,7 @@ def 
test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "octopus", } path = RevisionsClient.common_project_path(**expected) @@ -4216,8 +4242,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "oyster" + location = "nudibranch" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -4228,8 +4254,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "cuttlefish", + "location": "mussel", } path = RevisionsClient.common_location_path(**expected) diff --git a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_services.py b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_services.py index af0363c22fdb..e2de63b49bc9 100644 --- a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_services.py +++ b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_services.py @@ -1468,12 +1468,14 @@ def test_get_service(request_type, transport: str = "grpc"): client_version="client_version_value", ingress=vendor_settings.IngressTraffic.INGRESS_TRAFFIC_ALL, launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, + invoker_iam_disabled=True, default_uri_disabled=True, + urls=["urls_value"], + custom_audiences=["custom_audiences_value"], observed_generation=2021, latest_ready_revision="latest_ready_revision_value", latest_created_revision="latest_created_revision_value", uri="uri_value", - custom_audiences=["custom_audiences_value"], satisfies_pzs=True, reconciling=True, etag="etag_value", @@ -1498,12 +1500,14 @@ def test_get_service(request_type, transport: str = "grpc"): assert response.client_version == "client_version_value" assert response.ingress == vendor_settings.IngressTraffic.INGRESS_TRAFFIC_ALL assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED + assert 
response.invoker_iam_disabled is True assert response.default_uri_disabled is True + assert response.urls == ["urls_value"] + assert response.custom_audiences == ["custom_audiences_value"] assert response.observed_generation == 2021 assert response.latest_ready_revision == "latest_ready_revision_value" assert response.latest_created_revision == "latest_created_revision_value" assert response.uri == "uri_value" - assert response.custom_audiences == ["custom_audiences_value"] assert response.satisfies_pzs is True assert response.reconciling is True assert response.etag == "etag_value" @@ -1615,12 +1619,14 @@ async def test_get_service_empty_call_async(): client_version="client_version_value", ingress=vendor_settings.IngressTraffic.INGRESS_TRAFFIC_ALL, launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, + invoker_iam_disabled=True, default_uri_disabled=True, + urls=["urls_value"], + custom_audiences=["custom_audiences_value"], observed_generation=2021, latest_ready_revision="latest_ready_revision_value", latest_created_revision="latest_created_revision_value", uri="uri_value", - custom_audiences=["custom_audiences_value"], satisfies_pzs=True, reconciling=True, etag="etag_value", @@ -1702,12 +1708,14 @@ async def test_get_service_async( client_version="client_version_value", ingress=vendor_settings.IngressTraffic.INGRESS_TRAFFIC_ALL, launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, + invoker_iam_disabled=True, default_uri_disabled=True, + urls=["urls_value"], + custom_audiences=["custom_audiences_value"], observed_generation=2021, latest_ready_revision="latest_ready_revision_value", latest_created_revision="latest_created_revision_value", uri="uri_value", - custom_audiences=["custom_audiences_value"], satisfies_pzs=True, reconciling=True, etag="etag_value", @@ -1733,12 +1741,14 @@ async def test_get_service_async( assert response.client_version == "client_version_value" assert response.ingress == vendor_settings.IngressTraffic.INGRESS_TRAFFIC_ALL assert 
response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED + assert response.invoker_iam_disabled is True assert response.default_uri_disabled is True + assert response.urls == ["urls_value"] + assert response.custom_audiences == ["custom_audiences_value"] assert response.observed_generation == 2021 assert response.latest_ready_revision == "latest_ready_revision_value" assert response.latest_created_revision == "latest_created_revision_value" assert response.uri == "uri_value" - assert response.custom_audiences == ["custom_audiences_value"] assert response.satisfies_pzs is True assert response.reconciling is True assert response.etag == "etag_value" @@ -4109,8 +4119,10 @@ def test_create_service_rest(request_type): "execution_environment": 1, "encryption_key": "encryption_key_value", "max_instance_request_concurrency": 3436, + "service_mesh": {"mesh": "mesh_value"}, "session_affinity": True, "health_check_disabled": True, + "node_selector": {"accelerator": "accelerator_value"}, }, "traffic": [ { @@ -4120,8 +4132,11 @@ def test_create_service_rest(request_type): "tag": "tag_value", } ], - "scaling": {"min_instance_count": 1920}, + "scaling": {"min_instance_count": 1920, "scaling_mode": 1}, + "invoker_iam_disabled": True, "default_uri_disabled": True, + "urls": ["urls_value1", "urls_value2"], + "custom_audiences": ["custom_audiences_value1", "custom_audiences_value2"], "observed_generation": 2021, "terminal_condition": { "type_": "type__value", @@ -4146,7 +4161,6 @@ def test_create_service_rest(request_type): } ], "uri": "uri_value", - "custom_audiences": ["custom_audiences_value1", "custom_audiences_value2"], "satisfies_pzs": True, "reconciling": True, "etag": "etag_value", @@ -4569,12 +4583,14 @@ def test_get_service_rest(request_type): client_version="client_version_value", ingress=vendor_settings.IngressTraffic.INGRESS_TRAFFIC_ALL, launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, + invoker_iam_disabled=True, default_uri_disabled=True, + 
urls=["urls_value"], + custom_audiences=["custom_audiences_value"], observed_generation=2021, latest_ready_revision="latest_ready_revision_value", latest_created_revision="latest_created_revision_value", uri="uri_value", - custom_audiences=["custom_audiences_value"], satisfies_pzs=True, reconciling=True, etag="etag_value", @@ -4603,12 +4619,14 @@ def test_get_service_rest(request_type): assert response.client_version == "client_version_value" assert response.ingress == vendor_settings.IngressTraffic.INGRESS_TRAFFIC_ALL assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED + assert response.invoker_iam_disabled is True assert response.default_uri_disabled is True + assert response.urls == ["urls_value"] + assert response.custom_audiences == ["custom_audiences_value"] assert response.observed_generation == 2021 assert response.latest_ready_revision == "latest_ready_revision_value" assert response.latest_created_revision == "latest_created_revision_value" assert response.uri == "uri_value" - assert response.custom_audiences == ["custom_audiences_value"] assert response.satisfies_pzs is True assert response.reconciling is True assert response.etag == "etag_value" @@ -5369,8 +5387,10 @@ def test_update_service_rest(request_type): "execution_environment": 1, "encryption_key": "encryption_key_value", "max_instance_request_concurrency": 3436, + "service_mesh": {"mesh": "mesh_value"}, "session_affinity": True, "health_check_disabled": True, + "node_selector": {"accelerator": "accelerator_value"}, }, "traffic": [ { @@ -5380,8 +5400,11 @@ def test_update_service_rest(request_type): "tag": "tag_value", } ], - "scaling": {"min_instance_count": 1920}, + "scaling": {"min_instance_count": 1920, "scaling_mode": 1}, + "invoker_iam_disabled": True, "default_uri_disabled": True, + "urls": ["urls_value1", "urls_value2"], + "custom_audiences": ["custom_audiences_value1", "custom_audiences_value2"], "observed_generation": 2021, "terminal_condition": { "type_": 
"type__value", @@ -5406,7 +5429,6 @@ def test_update_service_rest(request_type): } ], "uri": "uri_value", - "custom_audiences": ["custom_audiences_value1", "custom_audiences_value2"], "satisfies_pzs": True, "reconciling": True, "etag": "etag_value", @@ -7487,11 +7509,57 @@ def test_parse_crypto_key_path(): assert expected == actual -def test_revision_path(): +def test_mesh_path(): project = "whelk" location = "octopus" - service = "oyster" - revision = "nudibranch" + mesh = "oyster" + expected = "projects/{project}/locations/{location}/meshes/{mesh}".format( + project=project, + location=location, + mesh=mesh, + ) + actual = ServicesClient.mesh_path(project, location, mesh) + assert expected == actual + + +def test_parse_mesh_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "mesh": "mussel", + } + path = ServicesClient.mesh_path(**expected) + + # Check that the path construction is reversible. + actual = ServicesClient.parse_mesh_path(path) + assert expected == actual + + +def test_policy_path(): + project = "winkle" + expected = "projects/{project}/policy".format( + project=project, + ) + actual = ServicesClient.policy_path(project) + assert expected == actual + + +def test_parse_policy_path(): + expected = { + "project": "nautilus", + } + path = ServicesClient.policy_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServicesClient.parse_policy_path(path) + assert expected == actual + + +def test_revision_path(): + project = "scallop" + location = "abalone" + service = "squid" + revision = "clam" expected = "projects/{project}/locations/{location}/services/{service}/revisions/{revision}".format( project=project, location=location, @@ -7504,10 +7572,10 @@ def test_revision_path(): def test_parse_revision_path(): expected = { - "project": "cuttlefish", - "location": "mussel", - "service": "winkle", - "revision": "nautilus", + "project": "whelk", + "location": "octopus", + "service": "oyster", + "revision": "nudibranch", } path = ServicesClient.revision_path(**expected) @@ -7517,8 +7585,8 @@ def test_parse_revision_path(): def test_secret_path(): - project = "scallop" - secret = "abalone" + project = "cuttlefish" + secret = "mussel" expected = "projects/{project}/secrets/{secret}".format( project=project, secret=secret, @@ -7529,8 +7597,8 @@ def test_secret_path(): def test_parse_secret_path(): expected = { - "project": "squid", - "secret": "clam", + "project": "winkle", + "secret": "nautilus", } path = ServicesClient.secret_path(**expected) @@ -7540,9 +7608,9 @@ def test_parse_secret_path(): def test_secret_version_path(): - project = "whelk" - secret = "octopus" - version = "oyster" + project = "scallop" + secret = "abalone" + version = "squid" expected = "projects/{project}/secrets/{secret}/versions/{version}".format( project=project, secret=secret, @@ -7554,9 +7622,9 @@ def test_secret_version_path(): def test_parse_secret_version_path(): expected = { - "project": "nudibranch", - "secret": "cuttlefish", - "version": "mussel", + "project": "clam", + "secret": "whelk", + "version": "octopus", } path = ServicesClient.secret_version_path(**expected) @@ -7566,9 +7634,9 @@ def test_parse_secret_version_path(): def test_service_path(): - project = "winkle" - location = "nautilus" - service = "scallop" + project = "oyster" + location = "nudibranch" + service = "cuttlefish" 
expected = "projects/{project}/locations/{location}/services/{service}".format( project=project, location=location, @@ -7580,9 +7648,9 @@ def test_service_path(): def test_parse_service_path(): expected = { - "project": "abalone", - "location": "squid", - "service": "clam", + "project": "mussel", + "location": "winkle", + "service": "nautilus", } path = ServicesClient.service_path(**expected) @@ -7592,7 +7660,7 @@ def test_parse_service_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -7602,7 +7670,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "abalone", } path = ServicesClient.common_billing_account_path(**expected) @@ -7612,7 +7680,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -7622,7 +7690,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "clam", } path = ServicesClient.common_folder_path(**expected) @@ -7632,7 +7700,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -7642,7 +7710,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "octopus", } path = ServicesClient.common_organization_path(**expected) @@ -7652,7 +7720,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "oyster" expected = "projects/{project}".format( project=project, ) @@ -7662,7 +7730,7 @@ def test_common_project_path(): 
def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "nudibranch", } path = ServicesClient.common_project_path(**expected) @@ -7672,8 +7740,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "cuttlefish" + location = "mussel" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -7684,8 +7752,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "winkle", + "location": "nautilus", } path = ServicesClient.common_location_path(**expected) From 023d09955a2b4e013a3506d2dbed45c3e7e4a696 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 11:09:14 -0400 Subject: [PATCH 41/59] docs: [google-cloud-batch] Clarify Batch only supports global custom instance template now (#13117) BEGIN_COMMIT_OVERRIDE docs: Clarify Batch only supports global custom instance template now END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 680722756 Source-Link: https://github.com/googleapis/googleapis/commit/42f7085c6332271d9b4d95c77ce3c9d5d0509cfc Source-Link: https://github.com/googleapis/googleapis-gen/commit/8aafe35f7fb64040f3f2eb79b46164250bec8483 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJhdGNoLy5Pd2xCb3QueWFtbCIsImgiOiI4YWFmZTM1ZjdmYjY0MDQwZjNmMmViNzliNDYxNjQyNTBiZWM4NDgzIn0= --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google-cloud-batch/google/cloud/batch/gapic_version.py | 2 +- .../google/cloud/batch_v1/gapic_version.py | 2 +- .../google-cloud-batch/google/cloud/batch_v1/types/job.py | 6 +++++- .../google/cloud/batch_v1alpha/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.batch.v1.json | 2 +- .../snippet_metadata_google.cloud.batch.v1alpha.json | 2 +- 6 files changed, 10 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index 5f7f6c52ce54..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.28" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index 5f7f6c52ce54..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.17.28" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py index c11a34f16b56..a5aca6e5d1bc 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py @@ -792,7 +792,11 @@ class InstancePolicyOrTemplate(proto.Message): instance_template (str): Name of an instance template used to create VMs. Named the field as 'instance_template' instead of 'template' to avoid - c++ keyword conflict. + C++ keyword conflict. + + Batch only supports global instance templates. You can + specify the global instance template as a full or partial + URL. This field is a member of `oneof`_ ``policy_template``. install_gpu_drivers (bool): diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index 5f7f6c52ce54..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.17.28" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index 4aeac54efe09..e2df1067e4dd 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.28" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 88b47050fed2..7f67670b100c 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.28" + "version": "0.1.0" }, "snippets": [ { From c1693486f314261e3799547ee6f5e53dd7e687fc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 11:15:11 -0400 Subject: [PATCH 42/59] feat: [google-cloud-dialogflow] created new boolean fields in conversation dataset for zone isolation and zone separation compliance status (#13107) BEGIN_COMMIT_OVERRIDE feat: created new boolean fields in conversation dataset for zone isolation and zone separation compliance status feat: add ALAW encoding value to Audio encoding enum END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
feat: add ALAW encoding value to Audio encoding enum PiperOrigin-RevId: 678636701 Source-Link: https://github.com/googleapis/googleapis/commit/0ede901c455762b9d55ea3cf386f50663d0650ba Source-Link: https://github.com/googleapis/googleapis-gen/commit/39c42782febc92124134995b2e7d78be762bcc22 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRpYWxvZ2Zsb3cvLk93bEJvdC55YW1sIiwiaCI6IjM5YzQyNzgyZmViYzkyMTI0MTM0OTk1YjJlN2Q3OGJlNzYyYmNjMjIifQ== --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google/cloud/dialogflow/gapic_version.py | 2 +- .../cloud/dialogflow_v2/gapic_version.py | 2 +- .../services/participants/async_client.py | 2 +- .../services/participants/client.py | 2 +- .../services/sessions/async_client.py | 4 ++-- .../dialogflow_v2/services/sessions/client.py | 4 ++-- .../cloud/dialogflow_v2/types/audio_config.py | 8 +++++++ .../types/conversation_dataset.py | 24 +++++++++++++++++++ .../cloud/dialogflow_v2beta1/gapic_version.py | 2 +- ...cipants_streaming_analyze_content_async.py | 2 +- ...icipants_streaming_analyze_content_sync.py | 2 +- ..._generated_sessions_detect_intent_async.py | 2 +- ...2_generated_sessions_detect_intent_sync.py | 2 +- ..._sessions_streaming_detect_intent_async.py | 2 +- ...d_sessions_streaming_detect_intent_sync.py | 2 +- ...t_metadata_google.cloud.dialogflow.v2.json | 2 +- ...adata_google.cloud.dialogflow.v2beta1.json | 2 +- .../test_conversation_datasets.py | 16 +++++++++++++ 18 files changed, 65 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py index c82b1e137507..558c8aab67c5 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.32.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py index c82b1e137507..558c8aab67c5 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.32.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/async_client.py index 43f7da028296..d1cfd3548f4a 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/async_client.py @@ -913,7 +913,7 @@ async def sample_streaming_analyze_content(): # Initialize request argument(s) audio_config = dialogflow_v2.InputAudioConfig() - audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" audio_config.sample_rate_hertz = 1817 audio_config.language_code = "language_code_value" diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py index dcd520b9ac4e..57bcaccfbc20 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py @@ -1409,7 +1409,7 @@ def 
sample_streaming_analyze_content(): # Initialize request argument(s) audio_config = dialogflow_v2.InputAudioConfig() - audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" audio_config.sample_rate_hertz = 1817 audio_config.language_code = "language_code_value" diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/async_client.py index fd60fadf2543..6b70ea5b5d22 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/async_client.py @@ -309,7 +309,7 @@ async def sample_detect_intent(): # Initialize request argument(s) query_input = dialogflow_v2.QueryInput() - query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" query_input.audio_config.sample_rate_hertz = 1817 query_input.audio_config.language_code = "language_code_value" @@ -472,7 +472,7 @@ async def sample_streaming_detect_intent(): # Initialize request argument(s) query_input = dialogflow_v2.QueryInput() - query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" query_input.audio_config.sample_rate_hertz = 1817 query_input.audio_config.language_code = "language_code_value" diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/client.py index ad4dec9d8b41..37a879d61bd5 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/client.py @@ -784,7 
+784,7 @@ def sample_detect_intent(): # Initialize request argument(s) query_input = dialogflow_v2.QueryInput() - query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" query_input.audio_config.sample_rate_hertz = 1817 query_input.audio_config.language_code = "language_code_value" @@ -944,7 +944,7 @@ def sample_streaming_detect_intent(): # Initialize request argument(s) query_input = dialogflow_v2.QueryInput() - query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" query_input.audio_config.sample_rate_hertz = 1817 query_input.audio_config.language_code = "language_code_value" diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/audio_config.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/audio_config.py index 200f60a0c406..2c2c2307a292 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/audio_config.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/audio_config.py @@ -150,6 +150,9 @@ class AudioEncoding(proto.Enum): 5574. In other words, each RTP header is replaced with a single byte containing the block length. Only Speex wideband is supported. ``sample_rate_hertz`` must be 16000. + AUDIO_ENCODING_ALAW (8): + 8-bit samples that compand 13-bit audio + samples using G.711 PCMU/a-law. """ AUDIO_ENCODING_UNSPECIFIED = 0 AUDIO_ENCODING_LINEAR_16 = 1 @@ -159,6 +162,7 @@ class AudioEncoding(proto.Enum): AUDIO_ENCODING_AMR_WB = 5 AUDIO_ENCODING_OGG_OPUS = 6 AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE = 7 + AUDIO_ENCODING_ALAW = 8 class SpeechModelVariant(proto.Enum): @@ -258,6 +262,9 @@ class OutputAudioEncoding(proto.Enum): OUTPUT_AUDIO_ENCODING_MULAW (5): 8-bit samples that compand 14-bit audio samples using G.711 PCMU/mu-law. 
+ OUTPUT_AUDIO_ENCODING_ALAW (6): + 8-bit samples that compand 13-bit audio + samples using G.711 PCMU/a-law. """ OUTPUT_AUDIO_ENCODING_UNSPECIFIED = 0 OUTPUT_AUDIO_ENCODING_LINEAR_16 = 1 @@ -265,6 +272,7 @@ class OutputAudioEncoding(proto.Enum): OUTPUT_AUDIO_ENCODING_MP3_64_KBPS = 4 OUTPUT_AUDIO_ENCODING_OGG_OPUS = 3 OUTPUT_AUDIO_ENCODING_MULAW = 5 + OUTPUT_AUDIO_ENCODING_ALAW = 6 class SpeechContext(proto.Message): diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_dataset.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_dataset.py index 788094e54596..19ba30433068 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_dataset.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_dataset.py @@ -91,6 +91,9 @@ class ConversationDataset(proto.Message): ImportConversationData on a dataset that already has data is not allowed). + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Output only. ConversationDataset resource name. Format: @@ -112,6 +115,17 @@ class ConversationDataset(proto.Message): conversation_count (int): Output only. The number of conversations this conversation dataset contains. + satisfies_pzi (bool): + Output only. A read only boolean field + reflecting Zone Isolation status of the dataset. + + This field is a member of `oneof`_ ``_satisfies_pzi``. + satisfies_pzs (bool): + Output only. A read only boolean field + reflecting Zone Separation status of the + dataset. + + This field is a member of `oneof`_ ``_satisfies_pzs``. 
""" name: str = proto.Field( @@ -145,6 +159,16 @@ class ConversationDataset(proto.Message): proto.INT64, number=7, ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=8, + optional=True, + ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=9, + optional=True, + ) class CreateConversationDatasetRequest(proto.Message): diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py index c82b1e137507..558c8aab67c5 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.32.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_async.py index 014ecaa28d7c..17db60fde096 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_async.py +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_async.py @@ -40,7 +40,7 @@ async def sample_streaming_analyze_content(): # Initialize request argument(s) audio_config = dialogflow_v2.InputAudioConfig() - audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" audio_config.sample_rate_hertz = 1817 audio_config.language_code = "language_code_value" diff --git 
a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_sync.py index b99cb62d1737..a2a254ea096b 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_sync.py +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_sync.py @@ -40,7 +40,7 @@ def sample_streaming_analyze_content(): # Initialize request argument(s) audio_config = dialogflow_v2.InputAudioConfig() - audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" audio_config.sample_rate_hertz = 1817 audio_config.language_code = "language_code_value" diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_async.py index 3e398b3f4df6..2fa858e2bd9f 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_async.py +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_async.py @@ -40,7 +40,7 @@ async def sample_detect_intent(): # Initialize request argument(s) query_input = dialogflow_v2.QueryInput() - query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" query_input.audio_config.sample_rate_hertz = 1817 query_input.audio_config.language_code = "language_code_value" diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_sync.py 
b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_sync.py index 297e7a9332f5..836cee509472 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_sync.py +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_sync.py @@ -40,7 +40,7 @@ def sample_detect_intent(): # Initialize request argument(s) query_input = dialogflow_v2.QueryInput() - query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" query_input.audio_config.sample_rate_hertz = 1817 query_input.audio_config.language_code = "language_code_value" diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_async.py index 540b21d4a829..a0b52da11775 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_async.py +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_async.py @@ -40,7 +40,7 @@ async def sample_streaming_detect_intent(): # Initialize request argument(s) query_input = dialogflow_v2.QueryInput() - query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" query_input.audio_config.sample_rate_hertz = 1817 query_input.audio_config.language_code = "language_code_value" diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_sync.py 
b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_sync.py index 34b9599f4d5a..6b154fd948c9 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_sync.py +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_sync.py @@ -40,7 +40,7 @@ def sample_streaming_detect_intent(): # Initialize request argument(s) query_input = dialogflow_v2.QueryInput() - query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" query_input.audio_config.sample_rate_hertz = 1817 query_input.audio_config.language_code = "language_code_value" diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json index 7e99cd1321e2..dde14d384e60 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json +++ b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow", - "version": "2.32.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json index a9752b2203e1..58a96bc185e8 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json +++ b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json @@ -8,7 +8,7 @@ ], 
"language": "PYTHON", "name": "google-cloud-dialogflow", - "version": "2.32.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py index e5c420409ba5..9b288c8fa5a9 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py @@ -1641,6 +1641,8 @@ def test_get_conversation_dataset(request_type, transport: str = "grpc"): display_name="display_name_value", description="description_value", conversation_count=1955, + satisfies_pzi=True, + satisfies_pzs=True, ) response = client.get_conversation_dataset(request) @@ -1656,6 +1658,8 @@ def test_get_conversation_dataset(request_type, transport: str = "grpc"): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.conversation_count == 1955 + assert response.satisfies_pzi is True + assert response.satisfies_pzs is True def test_get_conversation_dataset_empty_call(): @@ -1769,6 +1773,8 @@ async def test_get_conversation_dataset_empty_call_async(): display_name="display_name_value", description="description_value", conversation_count=1955, + satisfies_pzi=True, + satisfies_pzs=True, ) ) response = await client.get_conversation_dataset() @@ -1844,6 +1850,8 @@ async def test_get_conversation_dataset_async( display_name="display_name_value", description="description_value", conversation_count=1955, + satisfies_pzi=True, + satisfies_pzs=True, ) ) response = await client.get_conversation_dataset(request) @@ -1860,6 +1868,8 @@ async def test_get_conversation_dataset_async( assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.conversation_count == 1955 + assert 
response.satisfies_pzi is True + assert response.satisfies_pzs is True @pytest.mark.asyncio @@ -3336,6 +3346,8 @@ def test_create_conversation_dataset_rest(request_type): "input_config": {"gcs_source": {"uris": ["uris_value1", "uris_value2"]}}, "conversation_info": {"language_code": "language_code_value"}, "conversation_count": 1955, + "satisfies_pzi": True, + "satisfies_pzs": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -3740,6 +3752,8 @@ def test_get_conversation_dataset_rest(request_type): display_name="display_name_value", description="description_value", conversation_count=1955, + satisfies_pzi=True, + satisfies_pzs=True, ) # Wrap the value into a proper Response obj @@ -3759,6 +3773,8 @@ def test_get_conversation_dataset_rest(request_type): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.conversation_count == 1955 + assert response.satisfies_pzi is True + assert response.satisfies_pzs is True def test_get_conversation_dataset_rest_use_cached_wrapped_rpc(): From 7f9bc3a7a504956eaf6eff5b80d77a15eda9e0b6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 11:20:16 -0400 Subject: [PATCH 43/59] docs: [google-cloud-parallelstore] minor documentation formatting fix for Parallelstore (#13109) BEGIN_COMMIT_OVERRIDE docs: minor documentation formatting fix for Parallelstore feat: adding v1 version of our api docs: cleanup of Parallelstore API descriptions feat: add UPGRADING state to Parallelstore state BEGIN_PUBLIC_OVERRIDE - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 681948084 Source-Link: https://github.com/googleapis/googleapis/commit/3708fdf26f073ba5cb83c3f3a3778f701c80458d Source-Link: https://github.com/googleapis/googleapis-gen/commit/1a535dedd7a34b71f1aa1afdcb9f458594772c60 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXBhcmFsbGVsc3RvcmUvLk93bEJvdC55YW1sIiwiaCI6IjFhNTM1ZGVkZDdhMzRiNzFmMWFhMWFmZGNiOWY0NTg1OTQ3NzJjNjAifQ== BEGIN_NESTED_COMMIT docs: [google-cloud-parallelstore] minor documentation formatting fix for Parallelstore PiperOrigin-RevId: 681538037 Source-Link: https://github.com/googleapis/googleapis/commit/917e347c536bec36446ac434455da0cb2e7ddf46 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ed8a66ee826a7aaa27450e2d533eb325cb31b77f Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXBhcmFsbGVsc3RvcmUvLk93bEJvdC55YW1sIiwiaCI6ImVkOGE2NmVlODI2YTdhYWEyNzQ1MGUyZDUzM2ViMzI1Y2IzMWI3N2YifQ== END_NESTED_COMMIT BEGIN_NESTED_COMMIT feat: [google-cloud-parallelstore] adding v1 version of our api PiperOrigin-RevId: 681144478 Source-Link: https://github.com/googleapis/googleapis/commit/692cc30fdef961552861625f45f097f576679e86 Source-Link: https://github.com/googleapis/googleapis-gen/commit/77b92bf618cc25a1ddce15413b481020d87a43d8 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXBhcmFsbGVsc3RvcmUvLk93bEJvdC55YW1sIiwiaCI6Ijc3YjkyYmY2MThjYzI1YTFkZGNlMTU0MTNiNDgxMDIwZDg3YTQzZDgifQ== END_NESTED_COMMIT BEGIN_NESTED_COMMIT docs: [google-cloud-parallelstore] cleanup of Parallelstore API descriptions feat: add UPGRADING state to Parallelstore state PiperOrigin-RevId: 678758858 Source-Link: https://github.com/googleapis/googleapis/commit/6125b3deffdaf8d23af5a99a853b2a9a13b407d2 Source-Link: https://github.com/googleapis/googleapis-gen/commit/8f63dd4523066a23d77be22802232a20a20f6281 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXBhcmFsbGVsc3RvcmUvLk93bEJvdC55YW1sIiwiaCI6IjhmNjNkZDQ1MjMwNjZhMjNkNzdiZTIyODAyMjMyYTIwYTIwZjYyODEifQ== END_NESTED_COMMIT --------- Co-authored-by: Owl Bot Co-authored-by: 
ohmayr --- .../google-cloud-parallelstore/docs/index.rst | 11 + .../docs/parallelstore_v1/parallelstore.rst | 10 + .../docs/parallelstore_v1/services_.rst | 6 + .../docs/parallelstore_v1/types_.rst | 6 + .../google/cloud/parallelstore_v1/__init__.py | 74 + .../parallelstore_v1/gapic_metadata.json | 133 + .../cloud/parallelstore_v1/gapic_version.py | 16 + .../google/cloud/parallelstore_v1/py.typed | 2 + .../parallelstore_v1/services/__init__.py | 15 + .../services/parallelstore/__init__.py | 22 + .../services/parallelstore/async_client.py | 1498 +++ .../services/parallelstore/client.py | 1960 ++++ .../services/parallelstore/pagers.py | 193 + .../parallelstore/transports/__init__.py | 36 + .../services/parallelstore/transports/base.py | 309 + .../services/parallelstore/transports/grpc.py | 573 ++ .../parallelstore/transports/grpc_asyncio.py | 628 ++ .../services/parallelstore/transports/rest.py | 1696 ++++ .../cloud/parallelstore_v1/types/__init__.py | 66 + .../parallelstore_v1/types/parallelstore.py | 1063 +++ .../services/parallelstore/async_client.py | 74 +- .../services/parallelstore/client.py | 74 +- .../services/parallelstore/transports/grpc.py | 14 +- .../parallelstore/transports/grpc_asyncio.py | 14 +- .../services/parallelstore/transports/rest.py | 26 +- .../types/parallelstore.py | 244 +- ...ted_parallelstore_create_instance_async.py | 61 + ...ated_parallelstore_create_instance_sync.py | 61 + ...ted_parallelstore_delete_instance_async.py | 56 + ...ated_parallelstore_delete_instance_sync.py | 56 + ...nerated_parallelstore_export_data_async.py | 60 + ...enerated_parallelstore_export_data_sync.py | 60 + ...erated_parallelstore_get_instance_async.py | 52 + ...nerated_parallelstore_get_instance_sync.py | 52 + ...nerated_parallelstore_import_data_async.py | 60 + ...enerated_parallelstore_import_data_sync.py | 60 + ...ated_parallelstore_list_instances_async.py | 53 + ...rated_parallelstore_list_instances_sync.py | 53 + ...ted_parallelstore_update_instance_async.py 
| 59 + ...ated_parallelstore_update_instance_sync.py | 59 + ...etadata_google.cloud.parallelstore.v1.json | 1150 +++ .../fixup_parallelstore_v1_keywords.py | 182 + .../unit/gapic/parallelstore_v1/__init__.py | 15 + .../parallelstore_v1/test_parallelstore.py | 8278 +++++++++++++++++ 44 files changed, 18950 insertions(+), 240 deletions(-) create mode 100644 packages/google-cloud-parallelstore/docs/parallelstore_v1/parallelstore.rst create mode 100644 packages/google-cloud-parallelstore/docs/parallelstore_v1/services_.rst create mode 100644 packages/google-cloud-parallelstore/docs/parallelstore_v1/types_.rst create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/__init__.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/gapic_metadata.json create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/gapic_version.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/py.typed create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/__init__.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/__init__.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/async_client.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/client.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/pagers.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/__init__.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/base.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/grpc.py create mode 
100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/rest.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/__init__.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/parallelstore.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_create_instance_async.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_create_instance_sync.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_delete_instance_async.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_delete_instance_sync.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_export_data_async.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_export_data_sync.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_get_instance_async.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_get_instance_sync.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_import_data_async.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_import_data_sync.py create mode 100644 
packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_list_instances_async.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_list_instances_sync.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_update_instance_async.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_update_instance_sync.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1.json create mode 100644 packages/google-cloud-parallelstore/scripts/fixup_parallelstore_v1_keywords.py create mode 100644 packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1/__init__.py create mode 100644 packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1/test_parallelstore.py diff --git a/packages/google-cloud-parallelstore/docs/index.rst b/packages/google-cloud-parallelstore/docs/index.rst index acda35a132db..2711251a6855 100644 --- a/packages/google-cloud-parallelstore/docs/index.rst +++ b/packages/google-cloud-parallelstore/docs/index.rst @@ -2,6 +2,9 @@ .. include:: multiprocessing.rst +This package includes clients for multiple versions of Parallelstore API. +By default, you will get version ``parallelstore_v1beta``. + API Reference ------------- @@ -11,6 +14,14 @@ API Reference parallelstore_v1beta/services_ parallelstore_v1beta/types_ +API Reference +------------- +.. 
toctree:: + :maxdepth: 2 + + parallelstore_v1/services_ + parallelstore_v1/types_ + Changelog --------- diff --git a/packages/google-cloud-parallelstore/docs/parallelstore_v1/parallelstore.rst b/packages/google-cloud-parallelstore/docs/parallelstore_v1/parallelstore.rst new file mode 100644 index 000000000000..cee322fb4453 --- /dev/null +++ b/packages/google-cloud-parallelstore/docs/parallelstore_v1/parallelstore.rst @@ -0,0 +1,10 @@ +Parallelstore +------------------------------- + +.. automodule:: google.cloud.parallelstore_v1.services.parallelstore + :members: + :inherited-members: + +.. automodule:: google.cloud.parallelstore_v1.services.parallelstore.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-parallelstore/docs/parallelstore_v1/services_.rst b/packages/google-cloud-parallelstore/docs/parallelstore_v1/services_.rst new file mode 100644 index 000000000000..70ee7e4829c2 --- /dev/null +++ b/packages/google-cloud-parallelstore/docs/parallelstore_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Parallelstore v1 API +============================================== +.. toctree:: + :maxdepth: 2 + + parallelstore diff --git a/packages/google-cloud-parallelstore/docs/parallelstore_v1/types_.rst b/packages/google-cloud-parallelstore/docs/parallelstore_v1/types_.rst new file mode 100644 index 000000000000..2353120e572c --- /dev/null +++ b/packages/google-cloud-parallelstore/docs/parallelstore_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Parallelstore v1 API +=========================================== + +.. 
automodule:: google.cloud.parallelstore_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/__init__.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/__init__.py new file mode 100644 index 000000000000..0300bbd36217 --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/__init__.py @@ -0,0 +1,74 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.parallelstore_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.parallelstore import ParallelstoreAsyncClient, ParallelstoreClient +from .types.parallelstore import ( + CreateInstanceRequest, + DeleteInstanceRequest, + DestinationGcsBucket, + DestinationParallelstore, + DirectoryStripeLevel, + ExportDataMetadata, + ExportDataRequest, + ExportDataResponse, + FileStripeLevel, + GetInstanceRequest, + ImportDataMetadata, + ImportDataRequest, + ImportDataResponse, + Instance, + ListInstancesRequest, + ListInstancesResponse, + OperationMetadata, + SourceGcsBucket, + SourceParallelstore, + TransferCounters, + TransferOperationMetadata, + TransferType, + UpdateInstanceRequest, +) + +__all__ = ( + "ParallelstoreAsyncClient", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "DestinationGcsBucket", + "DestinationParallelstore", + "DirectoryStripeLevel", + "ExportDataMetadata", + "ExportDataRequest", + "ExportDataResponse", + "FileStripeLevel", + "GetInstanceRequest", + "ImportDataMetadata", + "ImportDataRequest", + "ImportDataResponse", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "OperationMetadata", + "ParallelstoreClient", + "SourceGcsBucket", + "SourceParallelstore", + "TransferCounters", + "TransferOperationMetadata", + "TransferType", + "UpdateInstanceRequest", +) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/gapic_metadata.json b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/gapic_metadata.json new file mode 100644 index 000000000000..b7dffed6e226 --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/gapic_metadata.json @@ -0,0 +1,133 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.parallelstore_v1", + "protoPackage": "google.cloud.parallelstore.v1", + "schema": 
"1.0", + "services": { + "Parallelstore": { + "clients": { + "grpc": { + "libraryClient": "ParallelstoreClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "ExportData": { + "methods": [ + "export_data" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ImportData": { + "methods": [ + "import_data" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ParallelstoreAsyncClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "ExportData": { + "methods": [ + "export_data" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ImportData": { + "methods": [ + "import_data" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + }, + "rest": { + "libraryClient": "ParallelstoreClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "ExportData": { + "methods": [ + "export_data" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ImportData": { + "methods": [ + "import_data" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/gapic_version.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- 
+# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/py.typed b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/py.typed new file mode 100644 index 000000000000..743160d56b7b --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-parallelstore package uses inline types. diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/__init__.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/__init__.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/__init__.py new file mode 100644 index 000000000000..ef9094cd9b4e --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ParallelstoreAsyncClient +from .client import ParallelstoreClient + +__all__ = ( + "ParallelstoreClient", + "ParallelstoreAsyncClient", +) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/async_client.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/async_client.py new file mode 100644 index 000000000000..2d553f8706be --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/async_client.py @@ -0,0 +1,1498 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.parallelstore_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.parallelstore_v1.services.parallelstore import pagers +from google.cloud.parallelstore_v1.types import parallelstore + +from .client import ParallelstoreClient +from .transports.base import DEFAULT_CLIENT_INFO, ParallelstoreTransport +from .transports.grpc_asyncio import ParallelstoreGrpcAsyncIOTransport + + +class 
ParallelstoreAsyncClient: + """Service describing handlers for resources. Configures and manages + parallelstore resources. + + Parallelstore service. + + The ``parallelstore.googleapis.com`` service implements the + parallelstore API and defines the following resource model for + managing instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of instances named + ``/instances/*``. + - Parallelstore instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be a Google Cloud ``zone``; for example: + + - ``projects/12345/locations/us-central1-c/instances/my-parallelstore-share`` + """ + + _client: ParallelstoreClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+ DEFAULT_ENDPOINT = ParallelstoreClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ParallelstoreClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ParallelstoreClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ParallelstoreClient._DEFAULT_UNIVERSE + + address_path = staticmethod(ParallelstoreClient.address_path) + parse_address_path = staticmethod(ParallelstoreClient.parse_address_path) + instance_path = staticmethod(ParallelstoreClient.instance_path) + parse_instance_path = staticmethod(ParallelstoreClient.parse_instance_path) + network_path = staticmethod(ParallelstoreClient.network_path) + parse_network_path = staticmethod(ParallelstoreClient.parse_network_path) + service_account_path = staticmethod(ParallelstoreClient.service_account_path) + parse_service_account_path = staticmethod( + ParallelstoreClient.parse_service_account_path + ) + common_billing_account_path = staticmethod( + ParallelstoreClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ParallelstoreClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ParallelstoreClient.common_folder_path) + parse_common_folder_path = staticmethod( + ParallelstoreClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ParallelstoreClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ParallelstoreClient.parse_common_organization_path + ) + common_project_path = staticmethod(ParallelstoreClient.common_project_path) + parse_common_project_path = staticmethod( + ParallelstoreClient.parse_common_project_path + ) + common_location_path = staticmethod(ParallelstoreClient.common_location_path) + parse_common_location_path = staticmethod( + ParallelstoreClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. 
+ + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ParallelstoreAsyncClient: The constructed client. + """ + return ParallelstoreClient.from_service_account_info.__func__(ParallelstoreAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ParallelstoreAsyncClient: The constructed client. + """ + return ParallelstoreClient.from_service_account_file.__func__(ParallelstoreAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint.
+ + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ParallelstoreClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ParallelstoreTransport: + """Returns the transport used by the client instance. + + Returns: + ParallelstoreTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = ParallelstoreClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ParallelstoreTransport, Callable[..., ParallelstoreTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the parallelstore async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,ParallelstoreTransport,Callable[..., ParallelstoreTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ParallelstoreTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ParallelstoreClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_instances( + self, + request: Optional[Union[parallelstore.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesAsyncPager: + r"""Lists all instances in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + async def sample_list_instances(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + request = parallelstore_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.parallelstore_v1.types.ListInstancesRequest, dict]]): + The request object. List instances request. + parent (:class:`str`): + Required. The project and location for which to retrieve + instance information, in the format + ``projects/{project_id}/locations/{location}``. + + To retrieve instance information for all locations, use + "-" as the value of ``{location}``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.parallelstore_v1.services.parallelstore.pagers.ListInstancesAsyncPager: + Response from + [ListInstances][google.cloud.parallelstore.v1.Parallelstore.ListInstances]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.ListInstancesRequest): + request = parallelstore.ListInstancesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_instances + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstancesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_instance( + self, + request: Optional[Union[parallelstore.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> parallelstore.Instance: + r"""Gets details of a single instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + async def sample_get_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + request = parallelstore_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.parallelstore_v1.types.GetInstanceRequest, dict]]): + The request object. Get an instance's details. + name (:class:`str`): + Required. The instance resource name, in the format + ``projects/{project_id}/locations/{location}/instances/{instance_id}``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.parallelstore_v1.types.Instance: + A Parallelstore instance. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.GetInstanceRequest): + request = parallelstore.GetInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_instance + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_instance( + self, + request: Optional[Union[parallelstore.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance: Optional[parallelstore.Instance] = None, + instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a Parallelstore instance in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + async def sample_create_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + instance = parallelstore_v1.Instance() + instance.capacity_gib = 1247 + + request = parallelstore_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.parallelstore_v1.types.CreateInstanceRequest, dict]]): + The request object. Create a new Parallelstore instance. + parent (:class:`str`): + Required. The instance's project and location, in the + format ``projects/{project}/locations/{location}``. + Locations map to Google Cloud zones; for example, + ``us-west1-b``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (:class:`google.cloud.parallelstore_v1.types.Instance`): + Required. The instance to create. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (:class:`str`): + Required. The name of the Parallelstore instance. + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the customer project / location + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.parallelstore_v1.types.Instance` A + Parallelstore instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance, instance_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, parallelstore.CreateInstanceRequest): + request = parallelstore.CreateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance is not None: + request.instance = instance + if instance_id is not None: + request.instance_id = instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_instance + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + parallelstore.Instance, + metadata_type=parallelstore.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_instance( + self, + request: Optional[Union[parallelstore.UpdateInstanceRequest, dict]] = None, + *, + instance: Optional[parallelstore.Instance] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + async def sample_update_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + instance = parallelstore_v1.Instance() + instance.capacity_gib = 1247 + + request = parallelstore_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.parallelstore_v1.types.UpdateInstanceRequest, dict]]): + The request object. Update an instance. + instance (:class:`google.cloud.parallelstore_v1.types.Instance`): + Required. The instance to update. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. Field mask is used + to specify the fields to be overwritten in the Instance + resource by the update. At least one path must be + supplied in this field. The fields specified in the + update_mask are relative to the resource, not the full + request. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.parallelstore_v1.types.Instance` A + Parallelstore instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.UpdateInstanceRequest): + request = parallelstore.UpdateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_instance + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("instance.name", request.instance.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + parallelstore.Instance, + metadata_type=parallelstore.OperationMetadata, + ) + + # Done; return the response. 
        # Done; return the response.
        return response

    async def delete_instance(
        self,
        request: Optional[Union[parallelstore.DeleteInstanceRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Deletes a single instance.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import parallelstore_v1

            async def sample_delete_instance():
                # Create a client
                client = parallelstore_v1.ParallelstoreAsyncClient()

                # Initialize request argument(s)
                request = parallelstore_v1.DeleteInstanceRequest(
                    name="name_value",
                )

                # Make the request
                operation = client.delete_instance(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.parallelstore_v1.types.DeleteInstanceRequest, dict]]):
                The request object. Delete an instance.
            name (:class:`str`):
                Required. Name of the resource
                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
                empty messages in your APIs. A typical example is to
                use it as the request or the response type of an API
                method. For instance:

                    service Foo {
                        rpc Bar(google.protobuf.Empty) returns
                        (google.protobuf.Empty);

                    }

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, parallelstore.DeleteInstanceRequest):
            request = parallelstore.DeleteInstanceRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._client._transport._wrapped_methods[
            self._client._transport.delete_instance
        ]

        # Certain fields should be provided within the metadata header;
        # add these here.  The routing header lets the backend route the
        # request based on the resource name.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future so callers can await the
        # long-running deletion; the LRO result type is Empty.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            empty_pb2.Empty,
            metadata_type=parallelstore.OperationMetadata,
        )

        # Done; return the response.
        return response

    async def import_data(
        self,
        request: Optional[Union[parallelstore.ImportDataRequest, dict]] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Copies data from Cloud Storage to Parallelstore.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import parallelstore_v1

            async def sample_import_data():
                # Create a client
                client = parallelstore_v1.ParallelstoreAsyncClient()

                # Initialize request argument(s)
                source_gcs_bucket = parallelstore_v1.SourceGcsBucket()
                source_gcs_bucket.uri = "uri_value"

                request = parallelstore_v1.ImportDataRequest(
                    source_gcs_bucket=source_gcs_bucket,
                    name="name_value",
                )

                # Make the request
                operation = client.import_data(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.parallelstore_v1.types.ImportDataRequest, dict]]):
                The request object. Import data from Cloud Storage into a
                Parallelstore instance.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.parallelstore_v1.types.ImportDataResponse`
                The response to a request to import data to
                Parallelstore.

        """
        # Create or coerce a protobuf request object.
        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, parallelstore.ImportDataRequest):
            request = parallelstore.ImportDataRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._client._transport._wrapped_methods[
            self._client._transport.import_data
        ]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            parallelstore.ImportDataResponse,
            metadata_type=parallelstore.ImportDataMetadata,
        )

        # Done; return the response.
        return response

    async def export_data(
        self,
        request: Optional[Union[parallelstore.ExportDataRequest, dict]] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Copies data from Parallelstore to Cloud Storage.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import parallelstore_v1

            async def sample_export_data():
                # Create a client
                client = parallelstore_v1.ParallelstoreAsyncClient()

                # Initialize request argument(s)
                destination_gcs_bucket = parallelstore_v1.DestinationGcsBucket()
                destination_gcs_bucket.uri = "uri_value"

                request = parallelstore_v1.ExportDataRequest(
                    destination_gcs_bucket=destination_gcs_bucket,
                    name="name_value",
                )

                # Make the request
                operation = client.export_data(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.parallelstore_v1.types.ExportDataRequest, dict]]):
                The request object. Export data from Parallelstore to
                Cloud Storage.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.parallelstore_v1.types.ExportDataResponse`
                The response to a request to export data from
                Parallelstore.

        """
        # Create or coerce a protobuf request object.
        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, parallelstore.ExportDataRequest):
            request = parallelstore.ExportDataRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._client._transport._wrapped_methods[
            self._client._transport.export_data
        ]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            parallelstore.ExportDataResponse,
            metadata_type=parallelstore.ExportDataMetadata,
        )

        # Done; return the response.
        return response
    async def list_operations(
        self,
        request: Optional[operations_pb2.ListOperationsRequest] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operations_pb2.ListOperationsResponse:
        r"""Lists operations that match the specified filter in the request.

        Args:
            request (:class:`~.operations_pb2.ListOperationsRequest`):
                The request object. Request message for
                `ListOperations` method.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            ~.operations_pb2.ListOperationsResponse:
                Response message for ``ListOperations`` method.
        """
        # Create or coerce a protobuf request object.
        # The request isn't a proto-plus wrapped type,
        # so it must be constructed via keyword expansion.
        if isinstance(request, dict):
            request = operations_pb2.ListOperationsRequest(**request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.list_operations,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    async def get_operation(
        self,
        request: Optional[operations_pb2.GetOperationRequest] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operations_pb2.Operation:
        r"""Gets the latest state of a long-running operation.

        Args:
            request (:class:`~.operations_pb2.GetOperationRequest`):
                The request object. Request message for
                `GetOperation` method.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            ~.operations_pb2.Operation:
                An ``Operation`` object.
        """
        # Create or coerce a protobuf request object.
        # The request isn't a proto-plus wrapped type,
        # so it must be constructed via keyword expansion.
        if isinstance(request, dict):
            request = operations_pb2.GetOperationRequest(**request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.get_operation,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    async def delete_operation(
        self,
        request: Optional[operations_pb2.DeleteOperationRequest] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> None:
        r"""Deletes a long-running operation.

        This method indicates that the client is no longer interested
        in the operation result. It does not cancel the operation.
        If the server doesn't support this method, it returns
        `google.rpc.Code.UNIMPLEMENTED`.

        Args:
            request (:class:`~.operations_pb2.DeleteOperationRequest`):
                The request object. Request message for
                `DeleteOperation` method.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            None
        """
        # Create or coerce a protobuf request object.
        # The request isn't a proto-plus wrapped type,
        # so it must be constructed via keyword expansion.
        if isinstance(request, dict):
            request = operations_pb2.DeleteOperationRequest(**request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.delete_operation,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.  No response body is expected for a delete.
        await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

    async def cancel_operation(
        self,
        request: Optional[operations_pb2.CancelOperationRequest] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> None:
        r"""Starts asynchronous cancellation on a long-running operation.

        The server makes a best effort to cancel the operation, but success
        is not guaranteed. If the server doesn't support this method, it returns
        `google.rpc.Code.UNIMPLEMENTED`.

        Args:
            request (:class:`~.operations_pb2.CancelOperationRequest`):
                The request object. Request message for
                `CancelOperation` method.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            None
        """
        # Create or coerce a protobuf request object.
        # The request isn't a proto-plus wrapped type,
        # so it must be constructed via keyword expansion.
        if isinstance(request, dict):
            request = operations_pb2.CancelOperationRequest(**request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.cancel_operation,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.  No response body is expected for a cancel.
        await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )
    async def get_location(
        self,
        request: Optional[locations_pb2.GetLocationRequest] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> locations_pb2.Location:
        r"""Gets information about a location.

        Args:
            request (:class:`~.location_pb2.GetLocationRequest`):
                The request object. Request message for
                `GetLocation` method.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            ~.location_pb2.Location:
                Location object.
        """
        # Create or coerce a protobuf request object.
        # The request isn't a proto-plus wrapped type,
        # so it must be constructed via keyword expansion.
        if isinstance(request, dict):
            request = locations_pb2.GetLocationRequest(**request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.get_location,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    async def list_locations(
        self,
        request: Optional[locations_pb2.ListLocationsRequest] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> locations_pb2.ListLocationsResponse:
        r"""Lists information about the supported locations for this service.

        Args:
            request (:class:`~.location_pb2.ListLocationsRequest`):
                The request object. Request message for
                `ListLocations` method.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            ~.location_pb2.ListLocationsResponse:
                Response message for ``ListLocations`` method.
        """
        # Create or coerce a protobuf request object.
        # The request isn't a proto-plus wrapped type,
        # so it must be constructed via keyword expansion.
        if isinstance(request, dict):
            request = locations_pb2.ListLocationsRequest(**request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.list_locations,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    # Async context-manager protocol: entering returns the client itself,
    # exiting closes the underlying transport.
    async def __aenter__(self) -> "ParallelstoreAsyncClient":
        return self

    async def __aexit__(self, exc_type, exc, tb):
        await self.transport.close()


# Defined after the class body; the methods above reference this name at call
# time, so module-level resolution order is not a problem.
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__
)


__all__ = ("ParallelstoreAsyncClient",)
diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/client.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/client.py
new file mode 100644
index 000000000000..20afd53a8928
--- /dev/null
+++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/client.py
@@ -0,0 +1,1960 @@
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import os
import re
from typing import (
    Callable,
    Dict,
    Mapping,
    MutableMapping,
    MutableSequence,
    Optional,
    Sequence,
    Tuple,
    Type,
    Union,
    cast,
)
import warnings

from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.exceptions import MutualTLSChannelError  # type: ignore
from google.auth.transport import mtls  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.parallelstore_v1 import gapic_version as package_version

try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore

from google.api_core import operation  # type: ignore
from google.api_core import operation_async  # type: ignore
from google.cloud.location import locations_pb2  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
from google.protobuf import empty_pb2  # type: ignore
from google.protobuf import field_mask_pb2  # type: ignore
from google.protobuf import timestamp_pb2  # type: ignore

from google.cloud.parallelstore_v1.services.parallelstore import pagers
from google.cloud.parallelstore_v1.types import parallelstore

from .transports.base import DEFAULT_CLIENT_INFO, ParallelstoreTransport
from .transports.grpc import ParallelstoreGrpcTransport
from .transports.grpc_asyncio import ParallelstoreGrpcAsyncIOTransport
from .transports.rest import ParallelstoreRestTransport


class ParallelstoreClientMeta(type):
    """Metaclass for the Parallelstore client.

    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client instance
    objects.
    """

    _transport_registry = OrderedDict()  # type: Dict[str, Type[ParallelstoreTransport]]
    _transport_registry["grpc"] = ParallelstoreGrpcTransport
    _transport_registry["grpc_asyncio"] = ParallelstoreGrpcAsyncIOTransport
    _transport_registry["rest"] = ParallelstoreRestTransport

    def get_transport_class(
        cls,
        label: Optional[str] = None,
    ) -> Type[ParallelstoreTransport]:
        """Returns an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.
        """
        # If a specific transport is requested, return that one.
        if label:
            return cls._transport_registry[label]

        # No transport is requested; return the default (that is, the first one
        # in the dictionary).
        return next(iter(cls._transport_registry.values()))


class ParallelstoreClient(metaclass=ParallelstoreClientMeta):
    """Service describing handlers for resources Configures and manages
    parallelstore resources.

    Parallelstore service.

    The ``parallelstore.googleapis.com`` service implements the
    parallelstore API and defines the following resource model for
    managing instances:

    -  The service works with a collection of cloud projects, named:
       ``/projects/*``
    -  Each project has a collection of available locations, named:
       ``/locations/*``
    -  Each location has a collection of instances named
       ``/instances/*``.
    -  Parallelstore instances are resources of the form:
       ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}``

    Note that location_id must be a Google Cloud ``zone``; for example:

    -  ``projects/12345/locations/us-central1-c/instances/my-parallelstore-share``
    """

    @staticmethod
    def _get_default_mtls_endpoint(api_endpoint):
        """Converts api endpoint to mTLS endpoint.

        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
        Args:
            api_endpoint (Optional[str]): the api endpoint to convert.
        Returns:
            str: converted mTLS api endpoint.
        """
        if not api_endpoint:
            return api_endpoint

        # NOTE: named groups reconstructed here; the extraction of this patch
        # stripped the angle-bracketed group names from the pattern.
        mtls_endpoint_re = re.compile(
            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
        )

        m = mtls_endpoint_re.match(api_endpoint)
        name, mtls, sandbox, googledomain = m.groups()
        if mtls or not googledomain:
            return api_endpoint

        if sandbox:
            return api_endpoint.replace(
                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
            )

        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")

    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
    DEFAULT_ENDPOINT = "parallelstore.googleapis.com"
    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
        DEFAULT_ENDPOINT
    )

    _DEFAULT_ENDPOINT_TEMPLATE = "parallelstore.{UNIVERSE_DOMAIN}"
    _DEFAULT_UNIVERSE = "googleapis.com"

    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            ParallelstoreClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_info(info)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ParallelstoreClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ParallelstoreTransport: + """Returns the transport used by the client instance. + + Returns: + ParallelstoreTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def address_path( + project: str, + region: str, + address: str, + ) -> str: + """Returns a fully-qualified address string.""" + return "projects/{project}/regions/{region}/addresses/{address}".format( + project=project, + region=region, + address=address, + ) + + @staticmethod + def parse_address_path(path: str) -> Dict[str, str]: + """Parses a address path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/regions/(?P.+?)/addresses/(?P
.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def instance_path( + project: str, + location: str, + instance: str, + ) -> str: + """Returns a fully-qualified instance string.""" + return "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, + location=location, + instance=instance, + ) + + @staticmethod + def parse_instance_path(path: str) -> Dict[str, str]: + """Parses a instance path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def network_path( + project: str, + network: str, + ) -> str: + """Returns a fully-qualified network string.""" + return "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + + @staticmethod + def parse_network_path(path: str) -> Dict[str, str]: + """Parses a network path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/global/networks/(?P.+?)$", path + ) + return m.groupdict() if m else {} + + @staticmethod + def service_account_path( + project: str, + service_account: str, + ) -> str: + """Returns a fully-qualified service_account string.""" + return "projects/{project}/serviceAccounts/{service_account}".format( + project=project, + service_account=service_account, + ) + + @staticmethod + def parse_service_account_path(path: str) -> Dict[str, str]: + """Parses a service_account path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/serviceAccounts/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path 
into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + 
"""Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". 
+ + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ParallelstoreClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ParallelstoreClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ParallelstoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = ParallelstoreClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. 
+ + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ParallelstoreClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ParallelstoreClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ParallelstoreTransport, Callable[..., ParallelstoreTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the parallelstore client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ParallelstoreTransport,Callable[..., ParallelstoreTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ParallelstoreTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. 
+ + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ParallelstoreClient._read_environment_variables() + self._client_cert_source = ParallelstoreClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ParallelstoreClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ParallelstoreTransport) + if transport_provided: + # transport is a ParallelstoreTransport instance. 
+ if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(ParallelstoreTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or ParallelstoreClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[ParallelstoreTransport], Callable[..., ParallelstoreTransport] + ] = ( + ParallelstoreClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ParallelstoreTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def list_instances( + self, + request: Optional[Union[parallelstore.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesPager: + r"""Lists all instances in a 
given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + def sample_list_instances(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + request = parallelstore_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.parallelstore_v1.types.ListInstancesRequest, dict]): + The request object. List instances request. + parent (str): + Required. The project and location for which to retrieve + instance information, in the format + ``projects/{project_id}/locations/{location}``. + + To retrieve instance information for all locations, use + "-" as the value of ``{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.parallelstore_v1.services.parallelstore.pagers.ListInstancesPager: + Response from + [ListInstances][google.cloud.parallelstore.v1.Parallelstore.ListInstances]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.ListInstancesRequest): + request = parallelstore.ListInstancesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInstancesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_instance( + self, + request: Optional[Union[parallelstore.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> parallelstore.Instance: + r"""Gets details of a single instance. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + def sample_get_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + request = parallelstore_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.parallelstore_v1.types.GetInstanceRequest, dict]): + The request object. Get an instance's details. + name (str): + Required. The instance resource name, in the format + ``projects/{project_id}/locations/{location}/instances/{instance_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.parallelstore_v1.types.Instance: + A Parallelstore instance. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.GetInstanceRequest): + request = parallelstore.GetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_instance( + self, + request: Optional[Union[parallelstore.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance: Optional[parallelstore.Instance] = None, + instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a Parallelstore instance in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + def sample_create_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + instance = parallelstore_v1.Instance() + instance.capacity_gib = 1247 + + request = parallelstore_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.parallelstore_v1.types.CreateInstanceRequest, dict]): + The request object. Create a new Parallelstore instance. + parent (str): + Required. The instance's project and location, in the + format ``projects/{project}/locations/{location}``. + Locations map to Google Cloud zones; for example, + ``us-west1-b``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (google.cloud.parallelstore_v1.types.Instance): + Required. The instance to create. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (str): + Required. The name of the Parallelstore instance. + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the customer project / location + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.parallelstore_v1.types.Instance` A + Parallelstore instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance, instance_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.CreateInstanceRequest): + request = parallelstore.CreateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance is not None: + request.instance = instance + if instance_id is not None: + request.instance_id = instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + parallelstore.Instance, + metadata_type=parallelstore.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_instance( + self, + request: Optional[Union[parallelstore.UpdateInstanceRequest, dict]] = None, + *, + instance: Optional[parallelstore.Instance] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + def sample_update_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + instance = parallelstore_v1.Instance() + instance.capacity_gib = 1247 + + request = parallelstore_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.parallelstore_v1.types.UpdateInstanceRequest, dict]): + The request object. Update an instance. + instance (google.cloud.parallelstore_v1.types.Instance): + Required. 
The instance to update. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. Field mask is used + to specify the fields to be overwritten in the Instance + resource by the update. At least one path must be + supplied in this field. The fields specified in the + update_mask are relative to the resource, not the full + request. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.parallelstore_v1.types.Instance` A + Parallelstore instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.UpdateInstanceRequest): + request = parallelstore.UpdateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if instance is not None: + request.instance = instance + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("instance.name", request.instance.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + parallelstore.Instance, + metadata_type=parallelstore.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_instance( + self, + request: Optional[Union[parallelstore.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + def sample_delete_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + request = parallelstore_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.parallelstore_v1.types.DeleteInstanceRequest, dict]): + The request object. Delete an instance. + name (str): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.DeleteInstanceRequest): + request = parallelstore.DeleteInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=parallelstore.OperationMetadata, + ) + + # Done; return the response. + return response + + def import_data( + self, + request: Optional[Union[parallelstore.ImportDataRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Copies data from Cloud Storage to Parallelstore. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + def sample_import_data(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + source_gcs_bucket = parallelstore_v1.SourceGcsBucket() + source_gcs_bucket.uri = "uri_value" + + request = parallelstore_v1.ImportDataRequest( + source_gcs_bucket=source_gcs_bucket, + name="name_value", + ) + + # Make the request + operation = client.import_data(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.parallelstore_v1.types.ImportDataRequest, dict]): + The request object. Import data from Cloud Storage into a + Parallelstore instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.parallelstore_v1.types.ImportDataResponse` + The response to a request to import data to + Parallelstore. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.ImportDataRequest): + request = parallelstore.ImportDataRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.import_data] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + parallelstore.ImportDataResponse, + metadata_type=parallelstore.ImportDataMetadata, + ) + + # Done; return the response. + return response + + def export_data( + self, + request: Optional[Union[parallelstore.ExportDataRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Copies data from Parallelstore to Cloud Storage. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + def sample_export_data(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + destination_gcs_bucket = parallelstore_v1.DestinationGcsBucket() + destination_gcs_bucket.uri = "uri_value" + + request = parallelstore_v1.ExportDataRequest( + destination_gcs_bucket=destination_gcs_bucket, + name="name_value", + ) + + # Make the request + operation = client.export_data(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.parallelstore_v1.types.ExportDataRequest, dict]): + The request object. Export data from Parallelstore to + Cloud Storage. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.parallelstore_v1.types.ExportDataResponse` + The response to a request to export data from + Parallelstore. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.ExportDataRequest): + request = parallelstore.ExportDataRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.export_data] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + parallelstore.ExportDataResponse, + metadata_type=parallelstore.ExportDataMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ParallelstoreClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. 
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ParallelstoreClient",) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/pagers.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/pagers.py new file mode 100644 index 000000000000..d70d39fd27ac --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/pagers.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.parallelstore_v1.types import parallelstore + + +class ListInstancesPager: + """A pager for iterating through ``list_instances`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.parallelstore_v1.types.ListInstancesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.parallelstore_v1.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., parallelstore.ListInstancesResponse], + request: parallelstore.ListInstancesRequest, + response: parallelstore.ListInstancesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.parallelstore_v1.types.ListInstancesRequest): + The initial request object. + response (google.cloud.parallelstore_v1.types.ListInstancesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = parallelstore.ListInstancesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[parallelstore.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[parallelstore.Instance]: + for page in self.pages: + yield from page.instances + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInstancesAsyncPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.parallelstore_v1.types.ListInstancesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.parallelstore_v1.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[parallelstore.ListInstancesResponse]], + request: parallelstore.ListInstancesRequest, + response: parallelstore.ListInstancesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.parallelstore_v1.types.ListInstancesRequest): + The initial request object. + response (google.cloud.parallelstore_v1.types.ListInstancesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = parallelstore.ListInstancesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[parallelstore.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[parallelstore.Instance]: + async def async_generator(): + async for page in self.pages: + for response in page.instances: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/__init__.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/__init__.py new file mode 100644 index 000000000000..b8fe31557374 --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ParallelstoreTransport +from .grpc import ParallelstoreGrpcTransport +from .grpc_asyncio import ParallelstoreGrpcAsyncIOTransport +from .rest import ParallelstoreRestInterceptor, ParallelstoreRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ParallelstoreTransport]] +_transport_registry["grpc"] = ParallelstoreGrpcTransport +_transport_registry["grpc_asyncio"] = ParallelstoreGrpcAsyncIOTransport +_transport_registry["rest"] = ParallelstoreRestTransport + +__all__ = ( + "ParallelstoreTransport", + "ParallelstoreGrpcTransport", + "ParallelstoreGrpcAsyncIOTransport", + "ParallelstoreRestTransport", + "ParallelstoreRestInterceptor", +) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/base.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/base.py new file mode 100644 index 000000000000..e62740dc2cd0 --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/base.py @@ -0,0 +1,309 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.parallelstore_v1 import gapic_version as package_version +from google.cloud.parallelstore_v1.types import parallelstore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ParallelstoreTransport(abc.ABC): + """Abstract transport class for Parallelstore.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "parallelstore.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'parallelstore.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_instances: gapic_v1.method.wrap_method( + self.list_instances, + default_timeout=None, + client_info=client_info, + ), + self.get_instance: gapic_v1.method.wrap_method( + self.get_instance, + default_timeout=None, + client_info=client_info, + ), + self.create_instance: gapic_v1.method.wrap_method( + self.create_instance, + default_timeout=None, + client_info=client_info, + ), + self.update_instance: gapic_v1.method.wrap_method( + self.update_instance, + default_timeout=None, + client_info=client_info, + ), + self.delete_instance: gapic_v1.method.wrap_method( + self.delete_instance, + default_timeout=None, + client_info=client_info, + ), + self.import_data: gapic_v1.method.wrap_method( + self.import_data, + default_timeout=None, + client_info=client_info, + ), + self.export_data: gapic_v1.method.wrap_method( + self.export_data, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_instances( + self, + ) -> Callable[ + [parallelstore.ListInstancesRequest], + Union[ + parallelstore.ListInstancesResponse, + Awaitable[parallelstore.ListInstancesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_instance( + self, + ) -> Callable[ + [parallelstore.GetInstanceRequest], + Union[parallelstore.Instance, Awaitable[parallelstore.Instance]], + ]: + raise NotImplementedError() + + @property + def create_instance( + self, + ) -> Callable[ + [parallelstore.CreateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_instance( + self, + ) -> Callable[ + [parallelstore.UpdateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_instance( + self, + ) -> Callable[ + [parallelstore.DeleteInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def import_data( + self, + ) -> Callable[ + [parallelstore.ImportDataRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def export_data( + self, + ) -> Callable[ + [parallelstore.ExportDataRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + 
[operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ParallelstoreTransport",) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/grpc.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/grpc.py new file mode 100644 index 000000000000..3f3073c101af --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/grpc.py @@ -0,0 +1,573 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.parallelstore_v1.types import parallelstore + +from .base import DEFAULT_CLIENT_INFO, ParallelstoreTransport + + +class ParallelstoreGrpcTransport(ParallelstoreTransport): + """gRPC backend transport for Parallelstore. + + Service describing handlers for resources Configures and manages + parallelstore resources. + + Parallelstore service. + + The ``parallelstore.googleapis.com`` service implements the + parallelstore API and defines the following resource model for + managing instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of instances named + ``/instances/*``. + - Parallelstore instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be a Google Cloud ``zone``; for example: + + - ``projects/12345/locations/us-central1-c/instances/my-parallelstore-share`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "parallelstore.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'parallelstore.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be 
used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "parallelstore.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def list_instances( + self, + ) -> Callable[ + [parallelstore.ListInstancesRequest], parallelstore.ListInstancesResponse + ]: + r"""Return a callable for the list instances method over gRPC. + + Lists all instances in a given project and location. + + Returns: + Callable[[~.ListInstancesRequest], + ~.ListInstancesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/ListInstances", + request_serializer=parallelstore.ListInstancesRequest.serialize, + response_deserializer=parallelstore.ListInstancesResponse.deserialize, + ) + return self._stubs["list_instances"] + + @property + def get_instance( + self, + ) -> Callable[[parallelstore.GetInstanceRequest], parallelstore.Instance]: + r"""Return a callable for the get instance method over gRPC. + + Gets details of a single instance. + + Returns: + Callable[[~.GetInstanceRequest], + ~.Instance]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/GetInstance", + request_serializer=parallelstore.GetInstanceRequest.serialize, + response_deserializer=parallelstore.Instance.deserialize, + ) + return self._stubs["get_instance"] + + @property + def create_instance( + self, + ) -> Callable[[parallelstore.CreateInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the create instance method over gRPC. + + Creates a Parallelstore instance in a given project + and location. + + Returns: + Callable[[~.CreateInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/CreateInstance", + request_serializer=parallelstore.CreateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_instance"] + + @property + def update_instance( + self, + ) -> Callable[[parallelstore.UpdateInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the update instance method over gRPC. + + Updates the parameters of a single instance. + + Returns: + Callable[[~.UpdateInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/UpdateInstance", + request_serializer=parallelstore.UpdateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_instance"] + + @property + def delete_instance( + self, + ) -> Callable[[parallelstore.DeleteInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a single instance. + + Returns: + Callable[[~.DeleteInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/DeleteInstance", + request_serializer=parallelstore.DeleteInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_instance"] + + @property + def import_data( + self, + ) -> Callable[[parallelstore.ImportDataRequest], operations_pb2.Operation]: + r"""Return a callable for the import data method over gRPC. + + Copies data from Cloud Storage to Parallelstore. + + Returns: + Callable[[~.ImportDataRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_data" not in self._stubs: + self._stubs["import_data"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/ImportData", + request_serializer=parallelstore.ImportDataRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_data"] + + @property + def export_data( + self, + ) -> Callable[[parallelstore.ExportDataRequest], operations_pb2.Operation]: + r"""Return a callable for the export data method over gRPC. + + Copies data from Parallelstore to Cloud Storage. + + Returns: + Callable[[~.ExportDataRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "export_data" not in self._stubs: + self._stubs["export_data"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/ExportData", + request_serializer=parallelstore.ExportDataRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_data"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ParallelstoreGrpcTransport",) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/grpc_asyncio.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/grpc_asyncio.py new file mode 100644 index 000000000000..721fc6fcc843 --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/grpc_asyncio.py @@ -0,0 +1,628 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.parallelstore_v1.types import parallelstore + +from .base import DEFAULT_CLIENT_INFO, ParallelstoreTransport +from .grpc import ParallelstoreGrpcTransport + + +class ParallelstoreGrpcAsyncIOTransport(ParallelstoreTransport): + """gRPC AsyncIO backend transport for Parallelstore. + + Service describing handlers for resources Configures and manages + parallelstore resources. + + Parallelstore service. + + The ``parallelstore.googleapis.com`` service implements the + parallelstore API and defines the following resource model for + managing instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of instances named + ``/instances/*``. + - Parallelstore instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be a Google Cloud ``zone``; for example: + + - ``projects/12345/locations/us-central1-c/instances/my-parallelstore-share`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "parallelstore.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "parallelstore.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'parallelstore.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + 
credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_instances( + self, + ) -> Callable[ + [parallelstore.ListInstancesRequest], + Awaitable[parallelstore.ListInstancesResponse], + ]: + r"""Return a callable for the list instances method over gRPC. + + Lists all instances in a given project and location. + + Returns: + Callable[[~.ListInstancesRequest], + Awaitable[~.ListInstancesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/ListInstances", + request_serializer=parallelstore.ListInstancesRequest.serialize, + response_deserializer=parallelstore.ListInstancesResponse.deserialize, + ) + return self._stubs["list_instances"] + + @property + def get_instance( + self, + ) -> Callable[ + [parallelstore.GetInstanceRequest], Awaitable[parallelstore.Instance] + ]: + r"""Return a callable for the get instance method over gRPC. + + Gets details of a single instance. + + Returns: + Callable[[~.GetInstanceRequest], + Awaitable[~.Instance]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/GetInstance", + request_serializer=parallelstore.GetInstanceRequest.serialize, + response_deserializer=parallelstore.Instance.deserialize, + ) + return self._stubs["get_instance"] + + @property + def create_instance( + self, + ) -> Callable[ + [parallelstore.CreateInstanceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create instance method over gRPC. + + Creates a Parallelstore instance in a given project + and location. + + Returns: + Callable[[~.CreateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/CreateInstance", + request_serializer=parallelstore.CreateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_instance"] + + @property + def update_instance( + self, + ) -> Callable[ + [parallelstore.UpdateInstanceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update instance method over gRPC. + + Updates the parameters of a single instance. + + Returns: + Callable[[~.UpdateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/UpdateInstance", + request_serializer=parallelstore.UpdateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_instance"] + + @property + def delete_instance( + self, + ) -> Callable[ + [parallelstore.DeleteInstanceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a single instance. + + Returns: + Callable[[~.DeleteInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/DeleteInstance", + request_serializer=parallelstore.DeleteInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_instance"] + + @property + def import_data( + self, + ) -> Callable[ + [parallelstore.ImportDataRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the import data method over gRPC. + + Copies data from Cloud Storage to Parallelstore. + + Returns: + Callable[[~.ImportDataRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_data" not in self._stubs: + self._stubs["import_data"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/ImportData", + request_serializer=parallelstore.ImportDataRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_data"] + + @property + def export_data( + self, + ) -> Callable[ + [parallelstore.ExportDataRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the export data method over gRPC. + + Copies data from Parallelstore to Cloud Storage. + + Returns: + Callable[[~.ExportDataRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "export_data" not in self._stubs: + self._stubs["export_data"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/ExportData", + request_serializer=parallelstore.ExportDataRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_data"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_instances: gapic_v1.method_async.wrap_method( + self.list_instances, + default_timeout=None, + client_info=client_info, + ), + self.get_instance: gapic_v1.method_async.wrap_method( + self.get_instance, + default_timeout=None, + client_info=client_info, + ), + self.create_instance: gapic_v1.method_async.wrap_method( + self.create_instance, + default_timeout=None, + client_info=client_info, + ), + self.update_instance: gapic_v1.method_async.wrap_method( + self.update_instance, + default_timeout=None, + client_info=client_info, + ), + self.delete_instance: gapic_v1.method_async.wrap_method( + self.delete_instance, + default_timeout=None, + client_info=client_info, + ), + self.import_data: gapic_v1.method_async.wrap_method( + self.import_data, + default_timeout=None, + client_info=client_info, + ), + self.export_data: gapic_v1.method_async.wrap_method( + self.export_data, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("ParallelstoreGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/rest.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/rest.py new file mode 100644 index 000000000000..928252b84ce3 --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/rest.py @@ -0,0 +1,1696 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.parallelstore_v1.types import parallelstore + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import ParallelstoreTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ParallelstoreRestInterceptor: + """Interceptor for Parallelstore. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ParallelstoreRestTransport. + + .. code-block:: python + class MyCustomParallelstoreInterceptor(ParallelstoreRestInterceptor): + def pre_create_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_export_data(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_data(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_import_data(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_import_data(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_instance(self, response): + 
logging.log(f"Received response: {response}") + return response + + transport = ParallelstoreRestTransport(interceptor=MyCustomParallelstoreInterceptor()) + client = ParallelstoreClient(transport=transport) + + + """ + + def pre_create_instance( + self, + request: parallelstore.CreateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[parallelstore.CreateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_create_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_instance + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + def pre_delete_instance( + self, + request: parallelstore.DeleteInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[parallelstore.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_delete_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_instance + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. 
+ """ + return response + + def pre_export_data( + self, + request: parallelstore.ExportDataRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[parallelstore.ExportDataRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_data + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_export_data( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for export_data + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + def pre_get_instance( + self, + request: parallelstore.GetInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[parallelstore.GetInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_get_instance( + self, response: parallelstore.Instance + ) -> parallelstore.Instance: + """Post-rpc interceptor for get_instance + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + def pre_import_data( + self, + request: parallelstore.ImportDataRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[parallelstore.ImportDataRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for import_data + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. 
+ """ + return request, metadata + + def post_import_data( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for import_data + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + def pre_list_instances( + self, + request: parallelstore.ListInstancesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[parallelstore.ListInstancesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_list_instances( + self, response: parallelstore.ListInstancesResponse + ) -> parallelstore.ListInstancesResponse: + """Post-rpc interceptor for list_instances + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + def pre_update_instance( + self, + request: parallelstore.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[parallelstore.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_update_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. 
+ """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. 
+ """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. 
+ """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ParallelstoreRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ParallelstoreRestInterceptor + + +class ParallelstoreRestTransport(ParallelstoreTransport): + """REST backend transport for Parallelstore. + + Service describing handlers for resources Configures and manages + parallelstore resources. + + Parallelstore service. + + The ``parallelstore.googleapis.com`` service implements the + parallelstore API and defines the following resource model for + managing instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of instances named + ``/instances/*``. + - Parallelstore instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be a Google Cloud ``zone``; for example: + + - ``projects/12345/locations/us-central1-c/instances/my-parallelstore-share`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "parallelstore.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ParallelstoreRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'parallelstore.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                    "https", but for testing or local servers,
+                    "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or ParallelstoreRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateInstance(ParallelstoreRestStub): + def __hash__(self): + return hash("CreateInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "instanceId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: parallelstore.CreateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create instance method over HTTP. + + Args: + request (~.parallelstore.CreateInstanceRequest): + The request object. Create a new Parallelstore instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/instances", + "body": "instance", + }, + ] + request, metadata = self._interceptor.pre_create_instance(request, metadata) + pb_request = parallelstore.CreateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_instance(resp) + return resp + + class _DeleteInstance(ParallelstoreRestStub): + def __hash__(self): + return hash("DeleteInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: parallelstore.DeleteInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete instance method over HTTP. + + Args: + request (~.parallelstore.DeleteInstanceRequest): + The request object. Delete an instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/instances/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_instance(request, metadata) + pb_request = parallelstore.DeleteInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_instance(resp) + return resp + + class _ExportData(ParallelstoreRestStub): + def __hash__(self): + return hash("ExportData") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: parallelstore.ExportDataRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the export data method over HTTP. + + Args: + request (~.parallelstore.ExportDataRequest): + The request object. Export data from Parallelstore to + Cloud Storage. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/instances/*}:exportData", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_export_data(request, metadata) + pb_request = parallelstore.ExportDataRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_data(resp) + return resp + + class _GetInstance(ParallelstoreRestStub): + def __hash__(self): + return hash("GetInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: parallelstore.GetInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> parallelstore.Instance: + r"""Call the get instance method over HTTP. + + Args: + request (~.parallelstore.GetInstanceRequest): + The request object. Get an instance's details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.parallelstore.Instance: + A Parallelstore instance. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/instances/*}", + }, + ] + request, metadata = self._interceptor.pre_get_instance(request, metadata) + pb_request = parallelstore.GetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = parallelstore.Instance() + pb_resp = parallelstore.Instance.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_instance(resp) + return resp + + class _ImportData(ParallelstoreRestStub): + def __hash__(self): + return hash("ImportData") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: parallelstore.ImportDataRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the import data method over HTTP. + + Args: + request (~.parallelstore.ImportDataRequest): + The request object. Import data from Cloud Storage into a + Parallelstore instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/instances/*}:importData", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_import_data(request, metadata) + pb_request = parallelstore.ImportDataRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_data(resp) + return resp + + class _ListInstances(ParallelstoreRestStub): + def __hash__(self): + return hash("ListInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: parallelstore.ListInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> parallelstore.ListInstancesResponse: + r"""Call the list instances method over HTTP. + + Args: + request (~.parallelstore.ListInstancesRequest): + The request object. List instances request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.parallelstore.ListInstancesResponse: + Response from + [ListInstances][google.cloud.parallelstore.v1.Parallelstore.ListInstances]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/instances", + }, + ] + request, metadata = self._interceptor.pre_list_instances(request, metadata) + pb_request = parallelstore.ListInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = parallelstore.ListInstancesResponse() + pb_resp = parallelstore.ListInstancesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_instances(resp) + return resp + + class _UpdateInstance(ParallelstoreRestStub): + def __hash__(self): + return hash("UpdateInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: parallelstore.UpdateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update instance method over HTTP. + + Args: + request (~.parallelstore.UpdateInstanceRequest): + The request object. Update an instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{instance.name=projects/*/locations/*/instances/*}", + "body": "instance", + }, + ] + request, metadata = self._interceptor.pre_update_instance(request, metadata) + pb_request = parallelstore.UpdateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_instance(resp) + return resp + + @property + def create_instance( + self, + ) -> Callable[[parallelstore.CreateInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance( + self, + ) -> Callable[[parallelstore.DeleteInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def export_data( + self, + ) -> Callable[[parallelstore.ExportDataRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExportData(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance( + self, + ) -> Callable[[parallelstore.GetInstanceRequest], parallelstore.Instance]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def import_data( + self, + ) -> Callable[[parallelstore.ImportDataRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ImportData(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instances( + self, + ) -> Callable[ + [parallelstore.ListInstancesRequest], parallelstore.ListInstancesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_instance( + self, + ) -> Callable[[parallelstore.UpdateInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(ParallelstoreRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(ParallelstoreRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ParallelstoreRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(ParallelstoreRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(ParallelstoreRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(ParallelstoreRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ParallelstoreRestTransport",) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/__init__.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/__init__.py new file mode 100644 index 000000000000..faadce2e0eff --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/__init__.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .parallelstore import ( + CreateInstanceRequest, + DeleteInstanceRequest, + DestinationGcsBucket, + DestinationParallelstore, + DirectoryStripeLevel, + ExportDataMetadata, + ExportDataRequest, + ExportDataResponse, + FileStripeLevel, + GetInstanceRequest, + ImportDataMetadata, + ImportDataRequest, + ImportDataResponse, + Instance, + ListInstancesRequest, + ListInstancesResponse, + OperationMetadata, + SourceGcsBucket, + SourceParallelstore, + TransferCounters, + TransferOperationMetadata, + TransferType, + UpdateInstanceRequest, +) + +__all__ = ( + "CreateInstanceRequest", + "DeleteInstanceRequest", + "DestinationGcsBucket", + "DestinationParallelstore", + "ExportDataMetadata", + "ExportDataRequest", + "ExportDataResponse", + "GetInstanceRequest", + "ImportDataMetadata", + "ImportDataRequest", + "ImportDataResponse", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "OperationMetadata", + "SourceGcsBucket", + "SourceParallelstore", + "TransferCounters", + "TransferOperationMetadata", + "UpdateInstanceRequest", + "DirectoryStripeLevel", + "FileStripeLevel", + "TransferType", +) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/parallelstore.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/parallelstore.py new file mode 100644 index 000000000000..29e63b7f19f5 --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/parallelstore.py @@ -0,0 +1,1063 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.parallelstore.v1", + manifest={ + "TransferType", + "FileStripeLevel", + "DirectoryStripeLevel", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "GetInstanceRequest", + "CreateInstanceRequest", + "UpdateInstanceRequest", + "DeleteInstanceRequest", + "OperationMetadata", + "SourceGcsBucket", + "DestinationGcsBucket", + "SourceParallelstore", + "DestinationParallelstore", + "ImportDataRequest", + "ExportDataRequest", + "ImportDataResponse", + "ImportDataMetadata", + "ExportDataResponse", + "ExportDataMetadata", + "TransferOperationMetadata", + "TransferCounters", + }, +) + + +class TransferType(proto.Enum): + r"""Type of transfer that occurred. + + Values: + TRANSFER_TYPE_UNSPECIFIED (0): + Zero is an illegal value. + IMPORT (1): + Imports to Parallelstore. + EXPORT (2): + Exports from Parallelstore. + """ + TRANSFER_TYPE_UNSPECIFIED = 0 + IMPORT = 1 + EXPORT = 2 + + +class FileStripeLevel(proto.Enum): + r"""Represents the striping options for files. 
+ + Values: + FILE_STRIPE_LEVEL_UNSPECIFIED (0): + If not set, FileStripeLevel will default to + FILE_STRIPE_LEVEL_BALANCED + FILE_STRIPE_LEVEL_MIN (1): + Minimum file striping + FILE_STRIPE_LEVEL_BALANCED (2): + Medium file striping + FILE_STRIPE_LEVEL_MAX (3): + Maximum file striping + """ + FILE_STRIPE_LEVEL_UNSPECIFIED = 0 + FILE_STRIPE_LEVEL_MIN = 1 + FILE_STRIPE_LEVEL_BALANCED = 2 + FILE_STRIPE_LEVEL_MAX = 3 + + +class DirectoryStripeLevel(proto.Enum): + r"""Represents the striping options for directories. + + Values: + DIRECTORY_STRIPE_LEVEL_UNSPECIFIED (0): + If not set, DirectoryStripeLevel will default to + DIRECTORY_STRIPE_LEVEL_MAX + DIRECTORY_STRIPE_LEVEL_MIN (1): + Minimum directory striping + DIRECTORY_STRIPE_LEVEL_BALANCED (2): + Medium directory striping + DIRECTORY_STRIPE_LEVEL_MAX (3): + Maximum directory striping + """ + DIRECTORY_STRIPE_LEVEL_UNSPECIFIED = 0 + DIRECTORY_STRIPE_LEVEL_MIN = 1 + DIRECTORY_STRIPE_LEVEL_BALANCED = 2 + DIRECTORY_STRIPE_LEVEL_MAX = 3 + + +class Instance(proto.Message): + r"""A Parallelstore instance. + + Attributes: + name (str): + Identifier. The resource name of the instance, in the format + ``projects/{project}/locations/{location}/instances/{instance_id}``. + description (str): + Optional. The description of the instance. + 2048 characters or less. + state (google.cloud.parallelstore_v1.types.Instance.State): + Output only. The instance state. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + updated. + labels (MutableMapping[str, str]): + Optional. Cloud Labels are a flexible and + lightweight mechanism for organizing cloud + resources into groups that reflect a customer's + organizational needs and deployment strategies. + See + https://cloud.google.com/resource-manager/docs/labels-overview + for details. + capacity_gib (int): + Required. 
Immutable. The instance's storage + capacity in Gibibytes (GiB). Allowed values are + between 12000 and 100000, in multiples of 4000; + e.g., 12000, 16000, 20000, ... + daos_version (str): + Output only. The version of DAOS software + running in the instance. + access_points (MutableSequence[str]): + Output only. A list of IPv4 addresses used + for client side configuration. + network (str): + Optional. Immutable. The name of the Compute Engine `VPC + network `__ to which + the instance is connected. + reserved_ip_range (str): + Optional. Immutable. The ID of the IP address range being + used by the instance's VPC network. See `Configure a VPC + network `__. + If no ID is provided, all ranges are considered. + effective_reserved_ip_range (str): + Output only. Immutable. The ID of the IP + address range being used by the instance's VPC + network. This field is populated by the service + and contains the value currently used by the + service. + file_stripe_level (google.cloud.parallelstore_v1.types.FileStripeLevel): + Optional. Stripe level for files. Allowed values are: + + - ``FILE_STRIPE_LEVEL_MIN``: offers the best performance + for small size files. + - ``FILE_STRIPE_LEVEL_BALANCED``: balances performance for + workloads involving a mix of small and large files. + - ``FILE_STRIPE_LEVEL_MAX``: higher throughput performance + for larger files. + directory_stripe_level (google.cloud.parallelstore_v1.types.DirectoryStripeLevel): + Optional. Stripe level for directories. Allowed values are: + + - ``DIRECTORY_STRIPE_LEVEL_MIN``: recommended when + directories contain a small number of files. + - ``DIRECTORY_STRIPE_LEVEL_BALANCED``: balances performance + for workloads involving a mix of small and large + directories. + - ``DIRECTORY_STRIPE_LEVEL_MAX``: recommended for + directories with a large number of files. + """ + + class State(proto.Enum): + r"""The possible states of a Parallelstore instance. + + Values: + STATE_UNSPECIFIED (0): + Not set. 
+ CREATING (1): + The instance is being created. + ACTIVE (2): + The instance is available for use. + DELETING (3): + The instance is being deleted. + FAILED (4): + The instance is not usable. + UPGRADING (5): + The instance is being upgraded. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + FAILED = 4 + UPGRADING = 5 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + state: State = proto.Field( + proto.ENUM, + number=3, + enum=State, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + capacity_gib: int = proto.Field( + proto.INT64, + number=8, + ) + daos_version: str = proto.Field( + proto.STRING, + number=9, + ) + access_points: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=10, + ) + network: str = proto.Field( + proto.STRING, + number=11, + ) + reserved_ip_range: str = proto.Field( + proto.STRING, + number=12, + ) + effective_reserved_ip_range: str = proto.Field( + proto.STRING, + number=14, + ) + file_stripe_level: "FileStripeLevel" = proto.Field( + proto.ENUM, + number=15, + enum="FileStripeLevel", + ) + directory_stripe_level: "DirectoryStripeLevel" = proto.Field( + proto.ENUM, + number=16, + enum="DirectoryStripeLevel", + ) + + +class ListInstancesRequest(proto.Message): + r"""List instances request. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + instance information, in the format + ``projects/{project_id}/locations/{location}``. + + To retrieve instance information for all locations, use "-" + as the value of ``{location}``. + page_size (int): + Optional. Requested page size. 
Server may + return fewer items than requested. If + unspecified, the server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListInstancesResponse(proto.Message): + r"""Response from + [ListInstances][google.cloud.parallelstore.v1.Parallelstore.ListInstances]. + + Attributes: + instances (MutableSequence[google.cloud.parallelstore_v1.types.Instance]): + The list of Parallelstore instances. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + instances: MutableSequence["Instance"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Instance", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetInstanceRequest(proto.Message): + r"""Get an instance's details. + + Attributes: + name (str): + Required. The instance resource name, in the format + ``projects/{project_id}/locations/{location}/instances/{instance_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateInstanceRequest(proto.Message): + r"""Create a new Parallelstore instance. + + Attributes: + parent (str): + Required. The instance's project and location, in the format + ``projects/{project}/locations/{location}``. 
Locations map
+ to Google Cloud zones; for example, ``us-west1-b``.
+ instance_id (str):
+ Required. The name of the Parallelstore instance.
+
+ - Must contain only lowercase letters, numbers, and
+ hyphens.
+ - Must start with a letter.
+ - Must be between 1-63 characters.
+ - Must end with a number or a letter.
+ - Must be unique within the customer project / location
+ instance (google.cloud.parallelstore_v1.types.Instance):
+ Required. The instance to create.
+ request_id (str):
+ Optional. An optional request ID to identify
+ requests. Specify a unique request ID so that if
+ you must retry your request, the server will
+ know to ignore the request if it has already
+ been completed. The server will guarantee that
+ for at least 60 minutes since the first request.
+
+ For example, consider a situation where you make
+ an initial request and the request times out.
+ If you make the request again with the same
+ request ID, the server can check if original
+ operation with the same request ID was received,
+ and if so, will ignore the second request. This
+ prevents clients from accidentally creating
+ duplicate commitments.
+
+ The request ID must be a valid UUID with the
+ exception that zero UUID is not supported
+ (00000000-0000-0000-0000-000000000000).
+ """
+
+ parent: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ instance_id: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ instance: "Instance" = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ message="Instance",
+ )
+ request_id: str = proto.Field(
+ proto.STRING,
+ number=4,
+ )
+
+
+class UpdateInstanceRequest(proto.Message):
+ r"""Update an instance.
+
+ Attributes:
+ update_mask (google.protobuf.field_mask_pb2.FieldMask):
+ Required. Mask of fields to update. Field mask is used to
+ specify the fields to be overwritten in the Instance
+ resource by the update. At least one path must be supplied
+ in this field. 
The fields specified in the update_mask are
+ relative to the resource, not the full request.
+ instance (google.cloud.parallelstore_v1.types.Instance):
+ Required. The instance to update.
+ request_id (str):
+ Optional. An optional request ID to identify
+ requests. Specify a unique request ID so that if
+ you must retry your request, the server will
+ know to ignore the request if it has already
+ been completed. The server will guarantee that
+ for at least 60 minutes since the first request.
+
+ For example, consider a situation where you make
+ an initial request and the request times out.
+ If you make the request again with the same
+ request ID, the server can check if original
+ operation with the same request ID was received,
+ and if so, will ignore the second request. This
+ prevents clients from accidentally creating
+ duplicate commitments.
+
+ The request ID must be a valid UUID with the
+ exception that zero UUID is not supported
+ (00000000-0000-0000-0000-000000000000).
+ """
+
+ update_mask: field_mask_pb2.FieldMask = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ message=field_mask_pb2.FieldMask,
+ )
+ instance: "Instance" = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ message="Instance",
+ )
+ request_id: str = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+
+
+class DeleteInstanceRequest(proto.Message):
+ r"""Delete an instance.
+
+ Attributes:
+ name (str):
+ Required. Name of the resource
+ request_id (str):
+ Optional. An optional request ID to identify
+ requests. Specify a unique request ID so that if
+ you must retry your request, the server will
+ know to ignore the request if it has already
+ been completed. The server will guarantee that
+ for at least 60 minutes after the first request.
+
+ For example, consider a situation where you make
+ an initial request and the request times out. 
+ If you make the request again with the same + request ID, the server can check if original + operation with the same request ID was received, + and if so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OperationMetadata(proto.Message): + r"""Long-running operation metadata. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have been + cancelled successfully have [Operation.error][] value with a + [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class SourceGcsBucket(proto.Message): + r"""Cloud Storage as the source of a data transfer. + + Attributes: + uri (str): + Required. URI to a Cloud Storage bucket in the format: + ``gs:///``. The path inside + the bucket is optional. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DestinationGcsBucket(proto.Message): + r"""Cloud Storage as the destination of a data transfer. + + Attributes: + uri (str): + Required. URI to a Cloud Storage bucket in the format: + ``gs:///``. The path inside + the bucket is optional. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + + +class SourceParallelstore(proto.Message): + r"""Parallelstore as the source of a data transfer. + + Attributes: + path (str): + Optional. Root directory path to the Paralellstore + filesystem, starting with ``/``. Defaults to ``/`` if unset. + """ + + path: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DestinationParallelstore(proto.Message): + r"""Parallelstore as the destination of a data transfer. + + Attributes: + path (str): + Optional. Root directory path to the Paralellstore + filesystem, starting with ``/``. Defaults to ``/`` if unset. + """ + + path: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ImportDataRequest(proto.Message): + r"""Import data from Cloud Storage into a Parallelstore instance. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + source_gcs_bucket (google.cloud.parallelstore_v1.types.SourceGcsBucket): + The Cloud Storage source bucket and, + optionally, path inside the bucket. + + This field is a member of `oneof`_ ``source``. + destination_parallelstore (google.cloud.parallelstore_v1.types.DestinationParallelstore): + Parallelstore destination. + + This field is a member of `oneof`_ ``destination``. + name (str): + Required. Name of the resource. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and t he request times out. + If you make the request again with the same + request ID, the server can check if original + operation with the same request ID was received, + and if so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + service_account (str): + Optional. User-specified service account credentials to be + used when performing the transfer. 
+ + Use one of the following formats: + + - ``{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + - ``projects/{PROJECT_ID_OR_NUMBER}/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + - ``projects/-/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + + If unspecified, the Parallelstore service agent is used: + ``service-@gcp-sa-parallelstore.iam.gserviceaccount.com`` + """ + + source_gcs_bucket: "SourceGcsBucket" = proto.Field( + proto.MESSAGE, + number=2, + oneof="source", + message="SourceGcsBucket", + ) + destination_parallelstore: "DestinationParallelstore" = proto.Field( + proto.MESSAGE, + number=3, + oneof="destination", + message="DestinationParallelstore", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + service_account: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ExportDataRequest(proto.Message): + r"""Export data from Parallelstore to Cloud Storage. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + source_parallelstore (google.cloud.parallelstore_v1.types.SourceParallelstore): + Parallelstore source. + + This field is a member of `oneof`_ ``source``. + destination_gcs_bucket (google.cloud.parallelstore_v1.types.DestinationGcsBucket): + Cloud Storage destination. + + This field is a member of `oneof`_ ``destination``. + name (str): + Required. Name of the resource. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and t he request times out. 
+ If you make the request again with the same + request ID, the server can check if original + operation with the same request ID was received, + and if so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + service_account (str): + Optional. User-specified Service Account (SA) credentials to + be used when performing the transfer. Use one of the + following formats: + + - ``{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + - ``projects/{PROJECT_ID_OR_NUMBER}/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + - ``projects/-/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + + If unspecified, the Parallelstore service agent is used: + ``service-@gcp-sa-parallelstore.iam.gserviceaccount.com`` + """ + + source_parallelstore: "SourceParallelstore" = proto.Field( + proto.MESSAGE, + number=2, + oneof="source", + message="SourceParallelstore", + ) + destination_gcs_bucket: "DestinationGcsBucket" = proto.Field( + proto.MESSAGE, + number=3, + oneof="destination", + message="DestinationGcsBucket", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + service_account: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ImportDataResponse(proto.Message): + r"""The response to a request to import data to Parallelstore.""" + + +class ImportDataMetadata(proto.Message): + r"""Metadata related to the data import operation. + + Attributes: + operation_metadata (google.cloud.parallelstore_v1.types.TransferOperationMetadata): + Data transfer operation metadata. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. 
Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + operation_metadata: "TransferOperationMetadata" = proto.Field( + proto.MESSAGE, + number=1, + message="TransferOperationMetadata", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=4, + ) + verb: str = proto.Field( + proto.STRING, + number=5, + ) + status_message: str = proto.Field( + proto.STRING, + number=6, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=7, + ) + api_version: str = proto.Field( + proto.STRING, + number=8, + ) + + +class ExportDataResponse(proto.Message): + r"""The response to a request to export data from Parallelstore.""" + + +class ExportDataMetadata(proto.Message): + r"""Metadata related to the data export operation. + + Attributes: + operation_metadata (google.cloud.parallelstore_v1.types.TransferOperationMetadata): + Data transfer operation metadata. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. 
Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + operation_metadata: "TransferOperationMetadata" = proto.Field( + proto.MESSAGE, + number=1, + message="TransferOperationMetadata", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=4, + ) + verb: str = proto.Field( + proto.STRING, + number=5, + ) + status_message: str = proto.Field( + proto.STRING, + number=6, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=7, + ) + api_version: str = proto.Field( + proto.STRING, + number=8, + ) + + +class TransferOperationMetadata(proto.Message): + r"""Long-running operation metadata related to a data transfer. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + source_parallelstore (google.cloud.parallelstore_v1.types.SourceParallelstore): + Output only. Parallelstore source. + + This field is a member of `oneof`_ ``source``. 
+ source_gcs_bucket (google.cloud.parallelstore_v1.types.SourceGcsBucket): + Output only. Cloud Storage source. + + This field is a member of `oneof`_ ``source``. + destination_gcs_bucket (google.cloud.parallelstore_v1.types.DestinationGcsBucket): + Output only. Cloud Storage destination. + + This field is a member of `oneof`_ ``destination``. + destination_parallelstore (google.cloud.parallelstore_v1.types.DestinationParallelstore): + Output only. Parallelstore destination. + + This field is a member of `oneof`_ ``destination``. + counters (google.cloud.parallelstore_v1.types.TransferCounters): + Output only. The progress of the transfer + operation. + transfer_type (google.cloud.parallelstore_v1.types.TransferType): + Output only. The type of transfer occurring. + """ + + source_parallelstore: "SourceParallelstore" = proto.Field( + proto.MESSAGE, + number=7, + oneof="source", + message="SourceParallelstore", + ) + source_gcs_bucket: "SourceGcsBucket" = proto.Field( + proto.MESSAGE, + number=8, + oneof="source", + message="SourceGcsBucket", + ) + destination_gcs_bucket: "DestinationGcsBucket" = proto.Field( + proto.MESSAGE, + number=9, + oneof="destination", + message="DestinationGcsBucket", + ) + destination_parallelstore: "DestinationParallelstore" = proto.Field( + proto.MESSAGE, + number=10, + oneof="destination", + message="DestinationParallelstore", + ) + counters: "TransferCounters" = proto.Field( + proto.MESSAGE, + number=3, + message="TransferCounters", + ) + transfer_type: "TransferType" = proto.Field( + proto.ENUM, + number=6, + enum="TransferType", + ) + + +class TransferCounters(proto.Message): + r"""A collection of counters that report the progress of a + transfer operation. + + Attributes: + objects_found (int): + Objects found in the data source that are + scheduled to be transferred, excluding any that + are filtered based on object conditions or + skipped due to sync. 
+ bytes_found (int): + Bytes found in the data source that are + scheduled to be transferred, excluding any that + are filtered based on object conditions or + skipped due to sync. + objects_skipped (int): + Objects in the data source that are not + transferred because they already exist in the + data destination. + bytes_skipped (int): + Bytes in the data source that are not + transferred because they already exist in the + data destination. + objects_copied (int): + Objects that are copied to the data + destination. + bytes_copied (int): + Bytes that are copied to the data + destination. + """ + + objects_found: int = proto.Field( + proto.INT64, + number=1, + ) + bytes_found: int = proto.Field( + proto.INT64, + number=2, + ) + objects_skipped: int = proto.Field( + proto.INT64, + number=3, + ) + bytes_skipped: int = proto.Field( + proto.INT64, + number=4, + ) + objects_copied: int = proto.Field( + proto.INT64, + number=5, + ) + bytes_copied: int = proto.Field( + proto.INT64, + number=6, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/async_client.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/async_client.py index cfb13d90803a..af354b4aee21 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/async_client.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/async_client.py @@ -300,7 +300,7 @@ async def list_instances( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesAsyncPager: - r"""Lists Instances in a given project and location. + r"""Lists all instances in a given project and location. .. 
code-block:: python @@ -331,16 +331,14 @@ async def sample_list_instances(): Args: request (Optional[Union[google.cloud.parallelstore_v1beta.types.ListInstancesRequest, dict]]): - The request object. Message for requesting list of - Instances + The request object. List instances request. parent (:class:`str`): Required. The project and location for which to retrieve instance information, in the format - ``projects/{project_id}/locations/{location}``. For - Parallelstore locations map to Google Cloud zones, for - example **us-central1-a**. To retrieve instance - information for all locations, use "-" for the - ``{location}`` value. + ``projects/{project_id}/locations/{location}``. + + To retrieve instance information for all locations, use + "-" as the value of ``{location}``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -353,11 +351,11 @@ async def sample_list_instances(): Returns: google.cloud.parallelstore_v1beta.services.parallelstore.pagers.ListInstancesAsyncPager: - Message for response to listing - Instances - Iterating over this object will yield - results and resolve additional pages - automatically. + Response from + [ListInstances][google.cloud.parallelstore.v1beta.Parallelstore.ListInstances]. + + Iterating over this object will yield results and + resolve additional pages automatically. """ # Create or coerce a protobuf request object. @@ -426,7 +424,7 @@ async def get_instance( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> parallelstore.Instance: - r"""Gets details of a single Instance. + r"""Gets details of a single instance. .. code-block:: python @@ -456,7 +454,7 @@ async def sample_get_instance(): Args: request (Optional[Union[google.cloud.parallelstore_v1beta.types.GetInstanceRequest, dict]]): - The request object. Request to get an instance's details. + The request object. Get an instance's details. name (:class:`str`): Required. 
The instance resource name, in the format ``projects/{project_id}/locations/{location}/instances/{instance_id}``. @@ -571,13 +569,12 @@ async def sample_create_instance(): Args: request (Optional[Union[google.cloud.parallelstore_v1beta.types.CreateInstanceRequest, dict]]): - The request object. Request for - [CreateInstance][google.cloud.parallelstore.v1beta.Parallelstore.CreateInstance] + The request object. Create a new Parallelstore instance. parent (:class:`str`): Required. The instance's project and location, in the format ``projects/{project}/locations/{location}``. - Locations map to Google Cloud zones, for example - **us-west1-b**. + Locations map to Google Cloud zones; for example, + ``us-west1-b``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -588,8 +585,7 @@ async def sample_create_instance(): on the ``request`` instance; if ``request`` is provided, this should not be set. instance_id (:class:`str`): - Required. The logical name of the Parallelstore instance - in the user project with the following restrictions: + Required. The name of the Parallelstore instance. - Must contain only lowercase letters, numbers, and hyphens. @@ -684,7 +680,7 @@ async def update_instance( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Updates the parameters of a single Instance. + r"""Updates the parameters of a single instance. .. code-block:: python @@ -721,14 +717,14 @@ async def sample_update_instance(): Args: request (Optional[Union[google.cloud.parallelstore_v1beta.types.UpdateInstanceRequest, dict]]): - The request object. Message for updating a Instance + The request object. Update an instance. instance (:class:`google.cloud.parallelstore_v1beta.types.Instance`): - Required. The instance to update + Required. The instance to update. 
This corresponds to the ``instance`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update .Field mask is used + Required. Mask of fields to update. Field mask is used to specify the fields to be overwritten in the Instance resource by the update. At least one path must be supplied in this field. The fields specified in the @@ -820,7 +816,7 @@ async def delete_instance( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Deletes a single Instance. + r"""Deletes a single instance. .. code-block:: python @@ -854,7 +850,7 @@ async def sample_delete_instance(): Args: request (Optional[Union[google.cloud.parallelstore_v1beta.types.DeleteInstanceRequest, dict]]): - The request object. Message for deleting a Instance + The request object. Delete an instance. name (:class:`str`): Required. Name of the resource This corresponds to the ``name`` field @@ -944,8 +940,7 @@ async def import_data( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""ImportData copies data from Cloud Storage to - Parallelstore. + r"""Copies data from Cloud Storage to Parallelstore. .. code-block:: python @@ -983,9 +978,8 @@ async def sample_import_data(): Args: request (Optional[Union[google.cloud.parallelstore_v1beta.types.ImportDataRequest, dict]]): - The request object. Message representing the request - importing data from parallelstore to - Cloud Storage. + The request object. Import data from Cloud Storage into a + Parallelstore instance. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -998,8 +992,8 @@ async def sample_import_data(): The result type for the operation will be :class:`google.cloud.parallelstore_v1beta.types.ImportDataResponse` - ImportDataResponse is the response returned from - ImportData rpc. + The response to a request to import data to + Parallelstore. """ # Create or coerce a protobuf request object. @@ -1050,8 +1044,7 @@ async def export_data( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""ExportData copies data from Parallelstore to Cloud - Storage + r"""Copies data from Parallelstore to Cloud Storage. .. code-block:: python @@ -1089,9 +1082,8 @@ async def sample_export_data(): Args: request (Optional[Union[google.cloud.parallelstore_v1beta.types.ExportDataRequest, dict]]): - The request object. Message representing the request - exporting data from Cloud Storage to - parallelstore. + The request object. Export data from Parallelstore to + Cloud Storage. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1104,8 +1096,8 @@ async def sample_export_data(): The result type for the operation will be :class:`google.cloud.parallelstore_v1beta.types.ExportDataResponse` - ExportDataResponse is the response returned from - ExportData rpc + The response to a request to export data from + Parallelstore. """ # Create or coerce a protobuf request object. 
diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/client.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/client.py index 1cb9552118a6..03db0bc0e5c8 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/client.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/client.py @@ -774,7 +774,7 @@ def list_instances( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesPager: - r"""Lists Instances in a given project and location. + r"""Lists all instances in a given project and location. .. code-block:: python @@ -805,16 +805,14 @@ def sample_list_instances(): Args: request (Union[google.cloud.parallelstore_v1beta.types.ListInstancesRequest, dict]): - The request object. Message for requesting list of - Instances + The request object. List instances request. parent (str): Required. The project and location for which to retrieve instance information, in the format - ``projects/{project_id}/locations/{location}``. For - Parallelstore locations map to Google Cloud zones, for - example **us-central1-a**. To retrieve instance - information for all locations, use "-" for the - ``{location}`` value. + ``projects/{project_id}/locations/{location}``. + + To retrieve instance information for all locations, use + "-" as the value of ``{location}``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -827,11 +825,11 @@ def sample_list_instances(): Returns: google.cloud.parallelstore_v1beta.services.parallelstore.pagers.ListInstancesPager: - Message for response to listing - Instances - Iterating over this object will yield - results and resolve additional pages - automatically. 
+ Response from + [ListInstances][google.cloud.parallelstore.v1beta.Parallelstore.ListInstances]. + + Iterating over this object will yield results and + resolve additional pages automatically. """ # Create or coerce a protobuf request object. @@ -897,7 +895,7 @@ def get_instance( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> parallelstore.Instance: - r"""Gets details of a single Instance. + r"""Gets details of a single instance. .. code-block:: python @@ -927,7 +925,7 @@ def sample_get_instance(): Args: request (Union[google.cloud.parallelstore_v1beta.types.GetInstanceRequest, dict]): - The request object. Request to get an instance's details. + The request object. Get an instance's details. name (str): Required. The instance resource name, in the format ``projects/{project_id}/locations/{location}/instances/{instance_id}``. @@ -1039,13 +1037,12 @@ def sample_create_instance(): Args: request (Union[google.cloud.parallelstore_v1beta.types.CreateInstanceRequest, dict]): - The request object. Request for - [CreateInstance][google.cloud.parallelstore.v1beta.Parallelstore.CreateInstance] + The request object. Create a new Parallelstore instance. parent (str): Required. The instance's project and location, in the format ``projects/{project}/locations/{location}``. - Locations map to Google Cloud zones, for example - **us-west1-b**. + Locations map to Google Cloud zones; for example, + ``us-west1-b``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1056,8 +1053,7 @@ def sample_create_instance(): on the ``request`` instance; if ``request`` is provided, this should not be set. instance_id (str): - Required. The logical name of the Parallelstore instance - in the user project with the following restrictions: + Required. The name of the Parallelstore instance. - Must contain only lowercase letters, numbers, and hyphens. 
@@ -1149,7 +1145,7 @@ def update_instance( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Updates the parameters of a single Instance. + r"""Updates the parameters of a single instance. .. code-block:: python @@ -1186,14 +1182,14 @@ def sample_update_instance(): Args: request (Union[google.cloud.parallelstore_v1beta.types.UpdateInstanceRequest, dict]): - The request object. Message for updating a Instance + The request object. Update an instance. instance (google.cloud.parallelstore_v1beta.types.Instance): - Required. The instance to update + Required. The instance to update. This corresponds to the ``instance`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update .Field mask is used + Required. Mask of fields to update. Field mask is used to specify the fields to be overwritten in the Instance resource by the update. At least one path must be supplied in this field. The fields specified in the @@ -1282,7 +1278,7 @@ def delete_instance( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Deletes a single Instance. + r"""Deletes a single instance. .. code-block:: python @@ -1316,7 +1312,7 @@ def sample_delete_instance(): Args: request (Union[google.cloud.parallelstore_v1beta.types.DeleteInstanceRequest, dict]): - The request object. Message for deleting a Instance + The request object. Delete an instance. name (str): Required. Name of the resource This corresponds to the ``name`` field @@ -1403,8 +1399,7 @@ def import_data( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""ImportData copies data from Cloud Storage to - Parallelstore. + r"""Copies data from Cloud Storage to Parallelstore. .. 
code-block:: python @@ -1442,9 +1437,8 @@ def sample_import_data(): Args: request (Union[google.cloud.parallelstore_v1beta.types.ImportDataRequest, dict]): - The request object. Message representing the request - importing data from parallelstore to - Cloud Storage. + The request object. Import data from Cloud Storage into a + Parallelstore instance. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1457,8 +1451,8 @@ def sample_import_data(): The result type for the operation will be :class:`google.cloud.parallelstore_v1beta.types.ImportDataResponse` - ImportDataResponse is the response returned from - ImportData rpc. + The response to a request to import data to + Parallelstore. """ # Create or coerce a protobuf request object. @@ -1507,8 +1501,7 @@ def export_data( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""ExportData copies data from Parallelstore to Cloud - Storage + r"""Copies data from Parallelstore to Cloud Storage. .. code-block:: python @@ -1546,9 +1539,8 @@ def sample_export_data(): Args: request (Union[google.cloud.parallelstore_v1beta.types.ExportDataRequest, dict]): - The request object. Message representing the request - exporting data from Cloud Storage to - parallelstore. + The request object. Export data from Parallelstore to + Cloud Storage. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1561,8 +1553,8 @@ def sample_export_data(): The result type for the operation will be :class:`google.cloud.parallelstore_v1beta.types.ExportDataResponse` - ExportDataResponse is the response returned from - ExportData rpc + The response to a request to export data from + Parallelstore. """ # Create or coerce a protobuf request object. 
diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/grpc.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/grpc.py index 4e6eb0b5ad05..a57745ff945c 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/grpc.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/grpc.py @@ -279,7 +279,7 @@ def list_instances( ]: r"""Return a callable for the list instances method over gRPC. - Lists Instances in a given project and location. + Lists all instances in a given project and location. Returns: Callable[[~.ListInstancesRequest], @@ -305,7 +305,7 @@ def get_instance( ) -> Callable[[parallelstore.GetInstanceRequest], parallelstore.Instance]: r"""Return a callable for the get instance method over gRPC. - Gets details of a single Instance. + Gets details of a single instance. Returns: Callable[[~.GetInstanceRequest], @@ -358,7 +358,7 @@ def update_instance( ) -> Callable[[parallelstore.UpdateInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the update instance method over gRPC. - Updates the parameters of a single Instance. + Updates the parameters of a single instance. Returns: Callable[[~.UpdateInstanceRequest], @@ -384,7 +384,7 @@ def delete_instance( ) -> Callable[[parallelstore.DeleteInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the delete instance method over gRPC. - Deletes a single Instance. + Deletes a single instance. Returns: Callable[[~.DeleteInstanceRequest], @@ -410,8 +410,7 @@ def import_data( ) -> Callable[[parallelstore.ImportDataRequest], operations_pb2.Operation]: r"""Return a callable for the import data method over gRPC. - ImportData copies data from Cloud Storage to - Parallelstore. + Copies data from Cloud Storage to Parallelstore. 
Returns: Callable[[~.ImportDataRequest], @@ -437,8 +436,7 @@ def export_data( ) -> Callable[[parallelstore.ExportDataRequest], operations_pb2.Operation]: r"""Return a callable for the export data method over gRPC. - ExportData copies data from Parallelstore to Cloud - Storage + Copies data from Parallelstore to Cloud Storage. Returns: Callable[[~.ExportDataRequest], diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/grpc_asyncio.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/grpc_asyncio.py index 67196ef6b27f..9a243a8a7c83 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/grpc_asyncio.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/grpc_asyncio.py @@ -286,7 +286,7 @@ def list_instances( ]: r"""Return a callable for the list instances method over gRPC. - Lists Instances in a given project and location. + Lists all instances in a given project and location. Returns: Callable[[~.ListInstancesRequest], @@ -314,7 +314,7 @@ def get_instance( ]: r"""Return a callable for the get instance method over gRPC. - Gets details of a single Instance. + Gets details of a single instance. Returns: Callable[[~.GetInstanceRequest], @@ -371,7 +371,7 @@ def update_instance( ]: r"""Return a callable for the update instance method over gRPC. - Updates the parameters of a single Instance. + Updates the parameters of a single instance. Returns: Callable[[~.UpdateInstanceRequest], @@ -399,7 +399,7 @@ def delete_instance( ]: r"""Return a callable for the delete instance method over gRPC. - Deletes a single Instance. + Deletes a single instance. Returns: Callable[[~.DeleteInstanceRequest], @@ -427,8 +427,7 @@ def import_data( ]: r"""Return a callable for the import data method over gRPC. 
- ImportData copies data from Cloud Storage to - Parallelstore. + Copies data from Cloud Storage to Parallelstore. Returns: Callable[[~.ImportDataRequest], @@ -456,8 +455,7 @@ def export_data( ]: r"""Return a callable for the export data method over gRPC. - ExportData copies data from Parallelstore to Cloud - Storage + Copies data from Parallelstore to Cloud Storage. Returns: Callable[[~.ExportDataRequest], diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/rest.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/rest.py index 89a55da7e057..ff9e16346f08 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/rest.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/rest.py @@ -628,8 +628,7 @@ def __call__( Args: request (~.parallelstore.CreateInstanceRequest): - The request object. Request for - [CreateInstance][google.cloud.parallelstore.v1beta.Parallelstore.CreateInstance] + The request object. Create a new Parallelstore instance. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -722,7 +721,7 @@ def __call__( Args: request (~.parallelstore.DeleteInstanceRequest): - The request object. Message for deleting a Instance + The request object. Delete an instance. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -808,9 +807,8 @@ def __call__( Args: request (~.parallelstore.ExportDataRequest): - The request object. Message representing the request - exporting data from Cloud Storage to - parallelstore. + The request object. Export data from Parallelstore to + Cloud Storage. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -903,7 +901,7 @@ def __call__( Args: request (~.parallelstore.GetInstanceRequest): - The request object. Request to get an instance's details. + The request object. Get an instance's details. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -988,9 +986,8 @@ def __call__( Args: request (~.parallelstore.ImportDataRequest): - The request object. Message representing the request - importing data from parallelstore to - Cloud Storage. + The request object. Import data from Cloud Storage into a + Parallelstore instance. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1083,8 +1080,7 @@ def __call__( Args: request (~.parallelstore.ListInstancesRequest): - The request object. Message for requesting list of - Instances + The request object. List instances request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1093,8 +1089,8 @@ def __call__( Returns: ~.parallelstore.ListInstancesResponse: - Message for response to listing - Instances + Response from + [ListInstances][google.cloud.parallelstore.v1beta.Parallelstore.ListInstances]. """ @@ -1173,7 +1169,7 @@ def __call__( Args: request (~.parallelstore.UpdateInstanceRequest): - The request object. Message for updating a Instance + The request object. Update an instance. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/types/parallelstore.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/types/parallelstore.py index ea929d73e265..7d28780d668e 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/types/parallelstore.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/types/parallelstore.py @@ -72,7 +72,8 @@ class FileStripeLevel(proto.Enum): Values: FILE_STRIPE_LEVEL_UNSPECIFIED (0): - Default file striping + If not set, FileStripeLevel will default to + FILE_STRIPE_LEVEL_BALANCED FILE_STRIPE_LEVEL_MIN (1): Minimum file striping FILE_STRIPE_LEVEL_BALANCED (2): @@ -91,7 +92,8 @@ class DirectoryStripeLevel(proto.Enum): Values: DIRECTORY_STRIPE_LEVEL_UNSPECIFIED (0): - Default directory striping + If not set, DirectoryStripeLevel will default to + DIRECTORY_STRIPE_LEVEL_MAX DIRECTORY_STRIPE_LEVEL_MIN (1): Minimum directory striping DIRECTORY_STRIPE_LEVEL_BALANCED (2): @@ -111,7 +113,7 @@ class Instance(proto.Message): Attributes: name (str): Identifier. The resource name of the instance, in the format - ``projects/{project}/locations/{location}/instances/{instance_id}`` + ``projects/{project}/locations/{location}/instances/{instance_id}``. description (str): Optional. The description of the instance. 2048 characters or less. @@ -124,76 +126,62 @@ class Instance(proto.Message): Output only. The time when the instance was updated. labels (MutableMapping[str, str]): - Optional. Cloud Labels are a flexible and lightweight - mechanism for organizing cloud resources into groups that - reflect a customer's organizational needs and deployment - strategies. Cloud Labels can be used to filter collections - of resources. They can be used to control how resource - metrics are aggregated. And they can be used as arguments to - policy management rules (e.g. route, firewall, load - balancing, etc.). 
- - - Label keys must be between 1 and 63 characters long and - must conform to the following regular expression: - ``[a-z][a-z0-9_-]{0,62}``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``[a-z0-9_-]{0,63}``. - - No more than 64 labels can be associated with a given - resource. - - See https://goo.gl/xmQnxf for more information on and - examples of labels. - - If you plan to use labels in your own code, please note that - additional characters may be allowed in the future. - Therefore, you are advised to use an internal label - representation, such as JSON, which doesn't rely upon - specific characters being disallowed. For example, - representing labels as the string: name + "*" + value would - prove problematic if we were to allow "*" in a future - release. + Optional. Cloud Labels are a flexible and + lightweight mechanism for organizing cloud + resources into groups that reflect a customer's + organizational needs and deployment strategies. + See + https://cloud.google.com/resource-manager/docs/labels-overview + for details. capacity_gib (int): - Required. Immutable. Storage capacity of - Parallelstore instance in Gibibytes (GiB). + Required. Immutable. The instance's storage + capacity in Gibibytes (GiB). Allowed values are + between 12000 and 100000, in multiples of 4000; + e.g., 12000, 16000, 20000, ... daos_version (str): Output only. The version of DAOS software - running in the instance + running in the instance. access_points (MutableSequence[str]): - Output only. List of access_points. Contains a list of IPv4 - addresses used for client side configuration. + Output only. A list of IPv4 addresses used + for client side configuration. network (str): - Optional. Immutable. The name of the Google Compute Engine - `VPC network `__ to - which the instance is connected. + Optional. Immutable. The name of the Compute Engine `VPC + network `__ to which + the instance is connected. 
reserved_ip_range (str): - Optional. Immutable. Contains the id of the - allocated IP address range associated with the - private service access connection for example, - "test-default" associated with IP range - 10.0.0.0/29. If no range id is provided all - ranges will be considered. + Optional. Immutable. The ID of the IP address range being + used by the instance's VPC network. See `Configure a VPC + network `__. + If no ID is provided, all ranges are considered. effective_reserved_ip_range (str): - Output only. Immutable. Contains the id of - the allocated IP address range associated with - the private service access connection for - example, "test-default" associated with IP range - 10.0.0.0/29. This field is populated by the - service and and contains the value currently - used by the service. + Output only. Immutable. The ID of the IP + address range being used by the instance's VPC + network. This field is populated by the service + and contains the value currently used by the + service. file_stripe_level (google.cloud.parallelstore_v1beta.types.FileStripeLevel): - Optional. Stripe level for files. - MIN better suited for small size files. - MAX higher throughput performance for larger - files. + Optional. Stripe level for files. Allowed values are: + + - ``FILE_STRIPE_LEVEL_MIN``: offers the best performance + for small size files. + - ``FILE_STRIPE_LEVEL_BALANCED``: balances performance for + workloads involving a mix of small and large files. + - ``FILE_STRIPE_LEVEL_MAX``: higher throughput performance + for larger files. directory_stripe_level (google.cloud.parallelstore_v1beta.types.DirectoryStripeLevel): - Optional. Stripe level for directories. - MIN when directory has a small number of files. - MAX when directory has a large number of files. + Optional. Stripe level for directories. Allowed values are: + + - ``DIRECTORY_STRIPE_LEVEL_MIN``: recommended when + directories contain a small number of files. 
+ - ``DIRECTORY_STRIPE_LEVEL_BALANCED``: balances performance + for workloads involving a mix of small and large + directories. + - ``DIRECTORY_STRIPE_LEVEL_MAX``: recommended for + directories with a large number of files. """ class State(proto.Enum): - r"""Represents the different states of a Parallelstore instance. + r"""The possible states of a Parallelstore instance. Values: STATE_UNSPECIFIED (0): @@ -206,12 +194,15 @@ class State(proto.Enum): The instance is being deleted. FAILED (4): The instance is not usable. + UPGRADING (5): + The instance is being upgraded. """ STATE_UNSPECIFIED = 0 CREATING = 1 ACTIVE = 2 DELETING = 3 FAILED = 4 + UPGRADING = 5 name: str = proto.Field( proto.STRING, @@ -278,28 +269,28 @@ class State(proto.Enum): class ListInstancesRequest(proto.Message): - r"""Message for requesting list of Instances + r"""List instances request. Attributes: parent (str): Required. The project and location for which to retrieve instance information, in the format - ``projects/{project_id}/locations/{location}``. For - Parallelstore locations map to Google Cloud zones, for - example **us-central1-a**. To retrieve instance information - for all locations, use "-" for the ``{location}`` value. + ``projects/{project_id}/locations/{location}``. + + To retrieve instance information for all locations, use "-" + as the value of ``{location}``. page_size (int): Optional. Requested page size. Server may return fewer items than requested. If - unspecified, server will pick an appropriate + unspecified, the server will pick an appropriate default. page_token (str): Optional. A token identifying a page of results the server should return. filter (str): - Optional. Filtering results + Optional. Filtering results. order_by (str): - Optional. Hint for how to order the results + Optional. Hint for how to order the results. 
""" parent: str = proto.Field( @@ -325,11 +316,12 @@ class ListInstancesRequest(proto.Message): class ListInstancesResponse(proto.Message): - r"""Message for response to listing Instances + r"""Response from + [ListInstances][google.cloud.parallelstore.v1beta.Parallelstore.ListInstances]. Attributes: instances (MutableSequence[google.cloud.parallelstore_v1beta.types.Instance]): - The list of Parallelstore Instances + The list of Parallelstore instances. next_page_token (str): A token identifying a page of results the server should return. @@ -357,7 +349,7 @@ def raw_page(self): class GetInstanceRequest(proto.Message): - r"""Request to get an instance's details. + r"""Get an instance's details. Attributes: name (str): @@ -372,17 +364,15 @@ class GetInstanceRequest(proto.Message): class CreateInstanceRequest(proto.Message): - r"""Request for - [CreateInstance][google.cloud.parallelstore.v1beta.Parallelstore.CreateInstance] + r"""Create a new Parallelstore instance. Attributes: parent (str): Required. The instance's project and location, in the format ``projects/{project}/locations/{location}``. Locations map - to Google Cloud zones, for example **us-west1-b**. + to Google Cloud zones; for example, ``us-west1-b``. instance_id (str): - Required. The logical name of the Parallelstore instance in - the user project with the following restrictions: + Required. The name of the Parallelstore instance. - Must contain only lowercase letters, numbers, and hyphens. @@ -434,17 +424,17 @@ class CreateInstanceRequest(proto.Message): class UpdateInstanceRequest(proto.Message): - r"""Message for updating a Instance + r"""Update an instance. Attributes: update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update .Field mask is used to + Required. Mask of fields to update. Field mask is used to specify the fields to be overwritten in the Instance resource by the update. At least one path must be supplied in this field. 
The fields specified in the update_mask are relative to the resource, not the full request. instance (google.cloud.parallelstore_v1beta.types.Instance): - Required. The instance to update + Required. The instance to update. request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if @@ -484,7 +474,7 @@ class UpdateInstanceRequest(proto.Message): class DeleteInstanceRequest(proto.Message): - r"""Message for deleting a Instance + r"""Delete an instance. Attributes: name (str): @@ -522,7 +512,7 @@ class DeleteInstanceRequest(proto.Message): class OperationMetadata(proto.Message): - r"""Represents the metadata of the long-running operation. + r"""Long-running operation metadata. Attributes: create_time (google.protobuf.timestamp_pb2.Timestamp): @@ -584,12 +574,13 @@ class OperationMetadata(proto.Message): class SourceGcsBucket(proto.Message): - r"""Google Cloud Storage as a source. + r"""Cloud Storage as the source of a data transfer. Attributes: uri (str): - Required. URI to a Cloud Storage object in format: - 'gs:///'. + Required. URI to a Cloud Storage bucket in the format: + ``gs:///``. The path inside + the bucket is optional. """ uri: str = proto.Field( @@ -599,12 +590,13 @@ class SourceGcsBucket(proto.Message): class DestinationGcsBucket(proto.Message): - r"""Google Cloud Storage as a destination. + r"""Cloud Storage as the destination of a data transfer. Attributes: uri (str): - Required. URI to a Cloud Storage object in format: - 'gs:///'. + Required. URI to a Cloud Storage bucket in the format: + ``gs:///``. The path inside + the bucket is optional. """ uri: str = proto.Field( @@ -614,13 +606,12 @@ class DestinationGcsBucket(proto.Message): class SourceParallelstore(proto.Message): - r"""Pa as a source. + r"""Parallelstore as the source of a data transfer. Attributes: path (str): - Optional. Root directory path to the - Paralellstore filesystem, starting with '/'. - Defaults to '/' if unset. + Optional. 
Root directory path to the Parallelstore + filesystem, starting with ``/``. Defaults to ``/`` if unset. """ path: str = proto.Field( @@ -630,13 +621,12 @@ class DestinationParallelstore(proto.Message): - r"""Parallelstore as a destination. + r"""Parallelstore as the destination of a data transfer. Attributes: path (str): - Optional. Root directory path to the - Paralellstore filesystem, starting with '/'. - Defaults to '/' if unset. + Optional. Root directory path to the Parallelstore + filesystem, starting with ``/``. Defaults to ``/`` if unset. """ path: str = proto.Field( @@ -646,15 +636,14 @@ class ImportDataRequest(proto.Message): - r"""Message representing the request importing data from - parallelstore to Cloud Storage. - + r"""Import data from Cloud Storage into a Parallelstore instance. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: source_gcs_bucket (google.cloud.parallelstore_v1beta.types.SourceGcsBucket): - Cloud Storage source. + The Cloud Storage source bucket and, + optionally, path inside the bucket. This field is a member of `oneof`_ ``source``. destination_parallelstore (google.cloud.parallelstore_v1beta.types.DestinationParallelstore): @@ -684,11 +673,17 @@ class ImportDataRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). service_account (str): - Optional. User-specified Service Account (SA) credentials to - be used when performing the transfer. Format: - ``projects/{project_id}/serviceAccounts/{service_account}`` + Optional. User-specified service account credentials to be + used when performing the transfer. 
+ + Use one of the following formats: + + - ``{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + - ``projects/{PROJECT_ID_OR_NUMBER}/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + - ``projects/-/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + If unspecified, the Parallelstore service agent is used: - service-@gcp-sa-parallelstore.iam.gserviceaccount.com) + ``service-@gcp-sa-parallelstore.iam.gserviceaccount.com`` """ source_gcs_bucket: "SourceGcsBucket" = proto.Field( @@ -718,9 +713,7 @@ class ImportDataRequest(proto.Message): class ExportDataRequest(proto.Message): - r"""Message representing the request exporting data from Cloud - Storage to parallelstore. - + r"""Export data from Parallelstore to Cloud Storage. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -757,10 +750,15 @@ class ExportDataRequest(proto.Message): (00000000-0000-0000-0000-000000000000). service_account (str): Optional. User-specified Service Account (SA) credentials to - be used when performing the transfer. Format: - ``projects/{project_id}/serviceAccounts/{service_account}`` + be used when performing the transfer. Use one of the + following formats: + + - ``{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + - ``projects/{PROJECT_ID_OR_NUMBER}/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + - ``projects/-/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + If unspecified, the Parallelstore service agent is used: - service-@gcp-sa-parallelstore.iam.gserviceaccount.com) + ``service-@gcp-sa-parallelstore.iam.gserviceaccount.com`` """ source_parallelstore: "SourceParallelstore" = proto.Field( @@ -790,19 +788,15 @@ class ExportDataRequest(proto.Message): class ImportDataResponse(proto.Message): - r"""ImportDataResponse is the response returned from ImportData - rpc. 
- - """ + r"""The response to a request to import data to Parallelstore.""" class ImportDataMetadata(proto.Message): - r"""ImportDataMetadata contains import data operation metadata + r"""Metadata related to the data import operation. Attributes: operation_metadata (google.cloud.parallelstore_v1beta.types.TransferOperationMetadata): - Contains the data transfer operation - metadata. + Data transfer operation metadata. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time the operation was created. @@ -867,19 +861,15 @@ class ImportDataMetadata(proto.Message): class ExportDataResponse(proto.Message): - r"""ExportDataResponse is the response returned from ExportData - rpc - - """ + r"""The response to a request to export data from Parallelstore.""" class ExportDataMetadata(proto.Message): - r"""ExportDataMetadata contains export data operation metadata + r"""Metadata related to the data export operation. Attributes: operation_metadata (google.cloud.parallelstore_v1beta.types.TransferOperationMetadata): - Contains the data transfer operation - metadata. + Data transfer operation metadata. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time the operation was created. @@ -944,7 +934,7 @@ class ExportDataMetadata(proto.Message): class TransferOperationMetadata(proto.Message): - r"""Represents the metadata of the long-running operation. + r"""Long-running operation metadata related to a data transfer. This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -971,8 +961,8 @@ class TransferOperationMetadata(proto.Message): This field is a member of `oneof`_ ``destination``. counters (google.cloud.parallelstore_v1beta.types.TransferCounters): - Output only. Information about the progress - of the transfer operation. + Output only. The progress of the transfer + operation. 
transfer_type (google.cloud.parallelstore_v1beta.types.TransferType): Output only. The type of transfer occurring. """ diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_create_instance_async.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_create_instance_async.py new file mode 100644 index 000000000000..c217c4b6b3f0 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_create_instance_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_CreateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +async def sample_create_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + instance = parallelstore_v1.Instance() + instance.capacity_gib = 1247 + + request = parallelstore_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_CreateInstance_async] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_create_instance_sync.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_create_instance_sync.py new file mode 100644 index 000000000000..5eb6bc11cbbe --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_create_instance_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_CreateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +def sample_create_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + instance = parallelstore_v1.Instance() + instance.capacity_gib = 1247 + + request = parallelstore_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_CreateInstance_sync] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_delete_instance_async.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_delete_instance_async.py new file mode 100644 index 000000000000..2adef8a7466c --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_delete_instance_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_DeleteInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +async def sample_delete_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + request = parallelstore_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_DeleteInstance_async] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_delete_instance_sync.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_delete_instance_sync.py new file mode 100644 index 000000000000..50f83b7f75f5 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_delete_instance_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_DeleteInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +def sample_delete_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + request = parallelstore_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_DeleteInstance_sync] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_export_data_async.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_export_data_async.py new file mode 100644 index 000000000000..6667f548aeaa --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_export_data_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportData +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_ExportData_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +async def sample_export_data(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + destination_gcs_bucket = parallelstore_v1.DestinationGcsBucket() + destination_gcs_bucket.uri = "uri_value" + + request = parallelstore_v1.ExportDataRequest( + destination_gcs_bucket=destination_gcs_bucket, + name="name_value", + ) + + # Make the request + operation = client.export_data(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_ExportData_async] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_export_data_sync.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_export_data_sync.py new file mode 100644 index 000000000000..aa13d529c028 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_export_data_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ExportData +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_ExportData_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +def sample_export_data(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + destination_gcs_bucket = parallelstore_v1.DestinationGcsBucket() + destination_gcs_bucket.uri = "uri_value" + + request = parallelstore_v1.ExportDataRequest( + destination_gcs_bucket=destination_gcs_bucket, + name="name_value", + ) + + # Make the request + operation = client.export_data(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_ExportData_sync] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_get_instance_async.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_get_instance_async.py new file mode 100644 index 000000000000..1062bd309c96 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_get_instance_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_GetInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +async def sample_get_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + request = parallelstore_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance(request=request) + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_GetInstance_async] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_get_instance_sync.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_get_instance_sync.py new file mode 100644 index 000000000000..eddbfa698810 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_get_instance_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_GetInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +def sample_get_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + request = parallelstore_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_GetInstance_sync] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_import_data_async.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_import_data_async.py new file mode 100644 index 000000000000..7d8d7a3a062f --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_import_data_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportData +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_ImportData_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +async def sample_import_data(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + source_gcs_bucket = parallelstore_v1.SourceGcsBucket() + source_gcs_bucket.uri = "uri_value" + + request = parallelstore_v1.ImportDataRequest( + source_gcs_bucket=source_gcs_bucket, + name="name_value", + ) + + # Make the request + operation = client.import_data(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_ImportData_async] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_import_data_sync.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_import_data_sync.py new file mode 100644 index 000000000000..8e1753b883a9 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_import_data_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ImportData +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_ImportData_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +def sample_import_data(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + source_gcs_bucket = parallelstore_v1.SourceGcsBucket() + source_gcs_bucket.uri = "uri_value" + + request = parallelstore_v1.ImportDataRequest( + source_gcs_bucket=source_gcs_bucket, + name="name_value", + ) + + # Make the request + operation = client.import_data(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_ImportData_sync] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_list_instances_async.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_list_instances_async.py new file mode 100644 index 000000000000..8eca19544f79 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_list_instances_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the 
Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_ListInstances_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +async def sample_list_instances(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + request = parallelstore_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END parallelstore_v1_generated_Parallelstore_ListInstances_async] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_list_instances_sync.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_list_instances_sync.py new file mode 100644 index 000000000000..38ed25cccec7 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_list_instances_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_ListInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +def sample_list_instances(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + request = parallelstore_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END parallelstore_v1_generated_Parallelstore_ListInstances_sync] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_update_instance_async.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_update_instance_async.py new file mode 100644 index 000000000000..150644e36304 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_update_instance_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_UpdateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +async def sample_update_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + instance = parallelstore_v1.Instance() + instance.capacity_gib = 1247 + + request = parallelstore_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_UpdateInstance_async] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_update_instance_sync.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_update_instance_sync.py new file mode 100644 index 000000000000..c8283d173411 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_update_instance_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_UpdateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +def sample_update_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + instance = parallelstore_v1.Instance() + instance.capacity_gib = 1247 + + request = parallelstore_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_UpdateInstance_sync] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1.json b/packages/google-cloud-parallelstore/samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1.json new file mode 100644 index 000000000000..aec0635b88a7 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1.json @@ -0,0 +1,1150 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.parallelstore.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": 
"google-cloud-parallelstore", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient", + "shortName": "ParallelstoreAsyncClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient.create_instance", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.CreateInstance", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "CreateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.parallelstore_v1.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_instance" + }, + "description": "Sample for CreateInstance", + "file": "parallelstore_v1_generated_parallelstore_create_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_CreateInstance_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_create_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + 
"fullName": "google.cloud.parallelstore_v1.ParallelstoreClient", + "shortName": "ParallelstoreClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient.create_instance", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.CreateInstance", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "CreateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.parallelstore_v1.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_instance" + }, + "description": "Sample for CreateInstance", + "file": "parallelstore_v1_generated_parallelstore_create_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_CreateInstance_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_create_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient", + "shortName": "ParallelstoreAsyncClient" + }, + "fullName": 
"google.cloud.parallelstore_v1.ParallelstoreAsyncClient.delete_instance", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.DeleteInstance", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "DeleteInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_instance" + }, + "description": "Sample for DeleteInstance", + "file": "parallelstore_v1_generated_parallelstore_delete_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_DeleteInstance_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_delete_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient", + "shortName": "ParallelstoreClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient.delete_instance", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.DeleteInstance", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "DeleteInstance" 
+ }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_instance" + }, + "description": "Sample for DeleteInstance", + "file": "parallelstore_v1_generated_parallelstore_delete_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_DeleteInstance_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_delete_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient", + "shortName": "ParallelstoreAsyncClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient.export_data", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.ExportData", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "ExportData" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.ExportDataRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } 
+ ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_data" + }, + "description": "Sample for ExportData", + "file": "parallelstore_v1_generated_parallelstore_export_data_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_ExportData_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_export_data_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient", + "shortName": "ParallelstoreClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient.export_data", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.ExportData", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "ExportData" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.ExportDataRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "export_data" + }, + "description": "Sample for ExportData", + "file": "parallelstore_v1_generated_parallelstore_export_data_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_ExportData_sync", + "segments": [ + { + "end": 59, + "start": 27, 
+ "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_export_data_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient", + "shortName": "ParallelstoreAsyncClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient.get_instance", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.GetInstance", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.parallelstore_v1.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": "parallelstore_v1_generated_parallelstore_get_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_GetInstance_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + 
"start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_get_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient", + "shortName": "ParallelstoreClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient.get_instance", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.GetInstance", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.parallelstore_v1.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": "parallelstore_v1_generated_parallelstore_get_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_GetInstance_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_get_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient", + "shortName": "ParallelstoreAsyncClient" + }, + 
"fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient.import_data", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.ImportData", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "ImportData" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.ImportDataRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_data" + }, + "description": "Sample for ImportData", + "file": "parallelstore_v1_generated_parallelstore_import_data_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_ImportData_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_import_data_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient", + "shortName": "ParallelstoreClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient.import_data", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.ImportData", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "ImportData" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.parallelstore_v1.types.ImportDataRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_data" + }, + "description": "Sample for ImportData", + "file": "parallelstore_v1_generated_parallelstore_import_data_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_ImportData_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_import_data_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient", + "shortName": "ParallelstoreAsyncClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient.list_instances", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.ListInstances", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.parallelstore_v1.services.parallelstore.pagers.ListInstancesAsyncPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "parallelstore_v1_generated_parallelstore_list_instances_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_ListInstances_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_list_instances_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient", + "shortName": "ParallelstoreClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient.list_instances", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.ListInstances", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.parallelstore_v1.services.parallelstore.pagers.ListInstancesPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "parallelstore_v1_generated_parallelstore_list_instances_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_ListInstances_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_list_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient", + "shortName": "ParallelstoreAsyncClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient.update_instance", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.UpdateInstance", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "UpdateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.UpdateInstanceRequest" + }, + { + "name": "instance", + "type": "google.cloud.parallelstore_v1.types.Instance" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_instance" + }, + "description": "Sample for UpdateInstance", + "file": "parallelstore_v1_generated_parallelstore_update_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_UpdateInstance_async", + "segments": [ + { + "end": 58, + "start": 27, + 
"type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_update_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient", + "shortName": "ParallelstoreClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient.update_instance", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.UpdateInstance", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "UpdateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.UpdateInstanceRequest" + }, + { + "name": "instance", + "type": "google.cloud.parallelstore_v1.types.Instance" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_instance" + }, + "description": "Sample for UpdateInstance", + "file": "parallelstore_v1_generated_parallelstore_update_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_UpdateInstance_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_update_instance_sync.py" + } + ] +} diff --git a/packages/google-cloud-parallelstore/scripts/fixup_parallelstore_v1_keywords.py b/packages/google-cloud-parallelstore/scripts/fixup_parallelstore_v1_keywords.py new file mode 100644 index 000000000000..7808e4ae2c7d --- /dev/null +++ b/packages/google-cloud-parallelstore/scripts/fixup_parallelstore_v1_keywords.py @@ -0,0 +1,182 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class parallelstoreCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_instance': ('parent', 'instance_id', 'instance', 'request_id', ), + 'delete_instance': ('name', 'request_id', ), + 'export_data': ('name', 'source_parallelstore', 'destination_gcs_bucket', 'request_id', 'service_account', ), + 'get_instance': ('name', ), + 'import_data': ('name', 'source_gcs_bucket', 'destination_parallelstore', 'request_id', 'service_account', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_instance': ('update_mask', 'instance', 'request_id', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=parallelstoreCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the parallelstore client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1/__init__.py b/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1/test_parallelstore.py b/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1/test_parallelstore.py new file mode 100644 index 000000000000..8f1b10a1e2e9 --- /dev/null +++ b/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1/test_parallelstore.py @@ -0,0 +1,8278 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.parallelstore_v1.services.parallelstore import ( + ParallelstoreAsyncClient, + ParallelstoreClient, + pagers, + transports, +) +from google.cloud.parallelstore_v1.types import parallelstore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ParallelstoreClient._get_default_mtls_endpoint(None) is None + assert ( + ParallelstoreClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ParallelstoreClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ParallelstoreClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ParallelstoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ParallelstoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert ParallelstoreClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ParallelstoreClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ParallelstoreClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, 
{"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ParallelstoreClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ParallelstoreClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ParallelstoreClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ParallelstoreClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ParallelstoreClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ParallelstoreClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ParallelstoreClient._get_client_cert_source(None, False) is None + assert ( + ParallelstoreClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + ParallelstoreClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( 
+ ParallelstoreClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ParallelstoreClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ParallelstoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ParallelstoreClient), +) +@mock.patch.object( + ParallelstoreAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ParallelstoreAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ParallelstoreClient._DEFAULT_UNIVERSE + default_endpoint = ParallelstoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ParallelstoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ParallelstoreClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ParallelstoreClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ParallelstoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ParallelstoreClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + ParallelstoreClient._get_api_endpoint(None, None, default_universe, "always") + == ParallelstoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ParallelstoreClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ParallelstoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ParallelstoreClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ParallelstoreClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ParallelstoreClient._get_api_endpoint( + None, 
mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ParallelstoreClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ParallelstoreClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ParallelstoreClient._get_universe_domain(None, None) + == ParallelstoreClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ParallelstoreClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ParallelstoreClient, transports.ParallelstoreGrpcTransport, "grpc"), + (ParallelstoreClient, transports.ParallelstoreRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ParallelstoreClient, "grpc"), + (ParallelstoreAsyncClient, "grpc_asyncio"), + (ParallelstoreClient, "rest"), + ], +) +def test_parallelstore_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "parallelstore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://parallelstore.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ParallelstoreGrpcTransport, "grpc"), + (transports.ParallelstoreGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ParallelstoreRestTransport, "rest"), + ], +) +def 
test_parallelstore_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ParallelstoreClient, "grpc"), + (ParallelstoreAsyncClient, "grpc_asyncio"), + (ParallelstoreClient, "rest"), + ], +) +def test_parallelstore_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "parallelstore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://parallelstore.googleapis.com" + ) + + +def test_parallelstore_client_get_transport_class(): + transport = ParallelstoreClient.get_transport_class() + available_transports = [ + transports.ParallelstoreGrpcTransport, + transports.ParallelstoreRestTransport, + ] + assert transport in available_transports + + transport = 
ParallelstoreClient.get_transport_class("grpc") + assert transport == transports.ParallelstoreGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ParallelstoreClient, transports.ParallelstoreGrpcTransport, "grpc"), + ( + ParallelstoreAsyncClient, + transports.ParallelstoreGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ParallelstoreClient, transports.ParallelstoreRestTransport, "rest"), + ], +) +@mock.patch.object( + ParallelstoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ParallelstoreClient), +) +@mock.patch.object( + ParallelstoreAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ParallelstoreAsyncClient), +) +def test_parallelstore_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ParallelstoreClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ParallelstoreClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + 
always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (ParallelstoreClient, transports.ParallelstoreGrpcTransport, "grpc", "true"), + ( + ParallelstoreAsyncClient, + transports.ParallelstoreGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (ParallelstoreClient, transports.ParallelstoreGrpcTransport, "grpc", "false"), + ( + ParallelstoreAsyncClient, + transports.ParallelstoreGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (ParallelstoreClient, transports.ParallelstoreRestTransport, "rest", "true"), + (ParallelstoreClient, transports.ParallelstoreRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + ParallelstoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ParallelstoreClient), +) +@mock.patch.object( + ParallelstoreAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ParallelstoreAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_parallelstore_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ParallelstoreClient, ParallelstoreAsyncClient] +) +@mock.patch.object( + ParallelstoreClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ParallelstoreClient), +) +@mock.patch.object( + ParallelstoreAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ParallelstoreAsyncClient), +) +def test_parallelstore_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [ParallelstoreClient, ParallelstoreAsyncClient] +) +@mock.patch.object( + ParallelstoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ParallelstoreClient), +) +@mock.patch.object( + ParallelstoreAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ParallelstoreAsyncClient), +) +def test_parallelstore_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ParallelstoreClient._DEFAULT_UNIVERSE + default_endpoint = ParallelstoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ParallelstoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ParallelstoreClient, transports.ParallelstoreGrpcTransport, "grpc"), + ( + ParallelstoreAsyncClient, + transports.ParallelstoreGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ParallelstoreClient, transports.ParallelstoreRestTransport, "rest"), + ], +) +def test_parallelstore_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ParallelstoreClient, + transports.ParallelstoreGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ParallelstoreAsyncClient, + transports.ParallelstoreGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (ParallelstoreClient, transports.ParallelstoreRestTransport, "rest", None), + ], +) +def test_parallelstore_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case 
credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_parallelstore_client_client_options_from_dict(): + with mock.patch( + "google.cloud.parallelstore_v1.services.parallelstore.transports.ParallelstoreGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ParallelstoreClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ParallelstoreClient, + transports.ParallelstoreGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ParallelstoreAsyncClient, + transports.ParallelstoreGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_parallelstore_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "parallelstore.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="parallelstore.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.ListInstancesRequest, + dict, + ], +) +def test_list_instances(request_type, transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are 
mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = parallelstore.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = parallelstore.ListInstancesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_instances_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.ListInstancesRequest() + + +def test_list_instances_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = parallelstore.ListInstancesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_instances(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.ListInstancesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_instances_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc + request = {} + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_instances_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + parallelstore.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.ListInstancesRequest() + + +@pytest.mark.asyncio +async def test_list_instances_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_instances + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + 
client._client._transport.list_instances + ] = mock_rpc + + request = {} + await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_instances_async( + transport: str = "grpc_asyncio", request_type=parallelstore.ListInstancesRequest +): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + parallelstore.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = parallelstore.ListInstancesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInstancesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_instances_async_from_dict(): + await test_list_instances_async(request_type=dict) + + +def test_list_instances_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.ListInstancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = parallelstore.ListInstancesResponse() + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_instances_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.ListInstancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + parallelstore.ListInstancesResponse() + ) + await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_instances_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = parallelstore.ListInstancesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_instances( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_instances_flattened_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + parallelstore.ListInstancesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_instances_flattened_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = parallelstore.ListInstancesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + parallelstore.ListInstancesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_instances( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_instances_flattened_error_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_instances( + parallelstore.ListInstancesRequest(), + parent="parent_value", + ) + + +def test_list_instances_pager(transport_name: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + parallelstore.Instance(), + ], + next_page_token="abc", + ), + parallelstore.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + ], + next_page_token="ghi", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_instances(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, parallelstore.Instance) for i in results) + + +def test_list_instances_pages(transport_name: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + parallelstore.Instance(), + ], + next_page_token="abc", + ), + parallelstore.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + ], + next_page_token="ghi", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + ], + ), + RuntimeError, + ) + pages = list(client.list_instances(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_instances_async_pager(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + parallelstore.Instance(), + ], + next_page_token="abc", + ), + parallelstore.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + ], + next_page_token="ghi", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_instances( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, parallelstore.Instance) for i in responses) + + +@pytest.mark.asyncio +async def test_list_instances_async_pages(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + parallelstore.Instance(), + ], + next_page_token="abc", + ), + parallelstore.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + ], + next_page_token="ghi", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_instances(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.GetInstanceRequest, + dict, + ], +) +def test_get_instance(request_type, transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = parallelstore.Instance( + name="name_value", + description="description_value", + state=parallelstore.Instance.State.CREATING, + capacity_gib=1247, + daos_version="daos_version_value", + access_points=["access_points_value"], + network="network_value", + reserved_ip_range="reserved_ip_range_value", + effective_reserved_ip_range="effective_reserved_ip_range_value", + file_stripe_level=parallelstore.FileStripeLevel.FILE_STRIPE_LEVEL_MIN, + directory_stripe_level=parallelstore.DirectoryStripeLevel.DIRECTORY_STRIPE_LEVEL_MIN, + ) + response = client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = parallelstore.GetInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, parallelstore.Instance) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == parallelstore.Instance.State.CREATING + assert response.capacity_gib == 1247 + assert response.daos_version == "daos_version_value" + assert response.access_points == ["access_points_value"] + assert response.network == "network_value" + assert response.reserved_ip_range == "reserved_ip_range_value" + assert response.effective_reserved_ip_range == "effective_reserved_ip_range_value" + assert ( + response.file_stripe_level + == parallelstore.FileStripeLevel.FILE_STRIPE_LEVEL_MIN + ) + assert ( + response.directory_stripe_level + == parallelstore.DirectoryStripeLevel.DIRECTORY_STRIPE_LEVEL_MIN + ) + + +def test_get_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.GetInstanceRequest() + + +def test_get_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = parallelstore.GetInstanceRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.GetInstanceRequest( + name="name_value", + ) + + +def test_get_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc + request = {} + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + parallelstore.Instance( + name="name_value", + description="description_value", + state=parallelstore.Instance.State.CREATING, + capacity_gib=1247, + daos_version="daos_version_value", + access_points=["access_points_value"], + network="network_value", + reserved_ip_range="reserved_ip_range_value", + effective_reserved_ip_range="effective_reserved_ip_range_value", + file_stripe_level=parallelstore.FileStripeLevel.FILE_STRIPE_LEVEL_MIN, + directory_stripe_level=parallelstore.DirectoryStripeLevel.DIRECTORY_STRIPE_LEVEL_MIN, + ) + ) + response = await client.get_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.GetInstanceRequest() + + +@pytest.mark.asyncio +async def test_get_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_instance + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_instance + ] = mock_rpc + + request = {} + await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_instance_async( + transport: str = "grpc_asyncio", request_type=parallelstore.GetInstanceRequest +): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + parallelstore.Instance( + name="name_value", + description="description_value", + state=parallelstore.Instance.State.CREATING, + capacity_gib=1247, + daos_version="daos_version_value", + access_points=["access_points_value"], + network="network_value", + reserved_ip_range="reserved_ip_range_value", + effective_reserved_ip_range="effective_reserved_ip_range_value", + file_stripe_level=parallelstore.FileStripeLevel.FILE_STRIPE_LEVEL_MIN, + directory_stripe_level=parallelstore.DirectoryStripeLevel.DIRECTORY_STRIPE_LEVEL_MIN, + ) + ) + response = await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = parallelstore.GetInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, parallelstore.Instance) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == parallelstore.Instance.State.CREATING + assert response.capacity_gib == 1247 + assert response.daos_version == "daos_version_value" + assert response.access_points == ["access_points_value"] + assert response.network == "network_value" + assert response.reserved_ip_range == "reserved_ip_range_value" + assert response.effective_reserved_ip_range == "effective_reserved_ip_range_value" + assert ( + response.file_stripe_level + == parallelstore.FileStripeLevel.FILE_STRIPE_LEVEL_MIN + ) + assert ( + response.directory_stripe_level + == parallelstore.DirectoryStripeLevel.DIRECTORY_STRIPE_LEVEL_MIN + ) + + +@pytest.mark.asyncio +async def test_get_instance_async_from_dict(): + await test_get_instance_async(request_type=dict) + + +def test_get_instance_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.GetInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = parallelstore.Instance() + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_instance_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.GetInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + parallelstore.Instance() + ) + await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_instance_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = parallelstore.Instance() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_instance_flattened_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance( + parallelstore.GetInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_instance_flattened_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = parallelstore.Instance() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + parallelstore.Instance() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_instance_flattened_error_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_instance( + parallelstore.GetInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.CreateInstanceRequest, + dict, + ], +) +def test_create_instance(request_type, transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = parallelstore.CreateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.CreateInstanceRequest() + + +def test_create_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = parallelstore.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + request_id="request_id_value", + ) + + +def test_create_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc + request = {} + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.CreateInstanceRequest() + + +@pytest.mark.asyncio +async def test_create_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_instance + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_instance + ] = mock_rpc + + request = {} + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_instance_async( + transport: str = "grpc_asyncio", request_type=parallelstore.CreateInstanceRequest +): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = parallelstore.CreateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_instance_async_from_dict(): + await test_create_instance_async(request_type=dict) + + +def test_create_instance_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.CreateInstanceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_instance_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.CreateInstanceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_instance_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_instance( + parent="parent_value", + instance=parallelstore.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = parallelstore.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + + +def test_create_instance_flattened_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_instance( + parallelstore.CreateInstanceRequest(), + parent="parent_value", + instance=parallelstore.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_instance_flattened_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_instance( + parent="parent_value", + instance=parallelstore.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = parallelstore.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_instance_flattened_error_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_instance( + parallelstore.CreateInstanceRequest(), + parent="parent_value", + instance=parallelstore.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.UpdateInstanceRequest, + dict, + ], +) +def test_update_instance(request_type, transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = parallelstore.UpdateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.UpdateInstanceRequest() + + +def test_update_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = parallelstore.UpdateInstanceRequest( + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.UpdateInstanceRequest( + request_id="request_id_value", + ) + + +def test_update_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc + request = {} + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.UpdateInstanceRequest() + + +@pytest.mark.asyncio +async def test_update_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_instance + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_instance + ] = mock_rpc + + request = {} + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_instance_async( + transport: str = "grpc_asyncio", request_type=parallelstore.UpdateInstanceRequest +): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = parallelstore.UpdateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_instance_async_from_dict(): + await test_update_instance_async(request_type=dict) + + +def test_update_instance_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.UpdateInstanceRequest() + + request.instance.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "instance.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_instance_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.UpdateInstanceRequest() + + request.instance.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "instance.name=name_value", + ) in kw["metadata"] + + +def test_update_instance_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_instance( + instance=parallelstore.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].instance + mock_val = parallelstore.Instance(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_instance_flattened_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_instance( + parallelstore.UpdateInstanceRequest(), + instance=parallelstore.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_instance_flattened_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_instance( + instance=parallelstore.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].instance + mock_val = parallelstore.Instance(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_instance_flattened_error_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_instance( + parallelstore.UpdateInstanceRequest(), + instance=parallelstore.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.DeleteInstanceRequest, + dict, + ], +) +def test_delete_instance(request_type, transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = parallelstore.DeleteInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.DeleteInstanceRequest() + + +def test_delete_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = parallelstore.DeleteInstanceRequest( + name="name_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.DeleteInstanceRequest( + name="name_value", + request_id="request_id_value", + ) + + +def test_delete_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc + request = {} + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.DeleteInstanceRequest() + + +@pytest.mark.asyncio +async def test_delete_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_instance + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_instance + ] = mock_rpc + + request = {} + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_instance_async( + transport: str = "grpc_asyncio", request_type=parallelstore.DeleteInstanceRequest +): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = parallelstore.DeleteInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_instance_async_from_dict(): + await test_delete_instance_async(request_type=dict) + + +def test_delete_instance_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.DeleteInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_instance_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.DeleteInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_instance_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_instance_flattened_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + parallelstore.DeleteInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_error_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_instance( + parallelstore.DeleteInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.ImportDataRequest, + dict, + ], +) +def test_import_data(request_type, transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.import_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = parallelstore.ImportDataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_import_data_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.import_data), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.import_data() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.ImportDataRequest() + + +def test_import_data_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = parallelstore.ImportDataRequest( + name="name_value", + service_account="service_account_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_data), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.import_data(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.ImportDataRequest( + name="name_value", + service_account="service_account_value", + ) + + +def test_import_data_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.import_data in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.import_data] = mock_rpc + request = {} + client.import_data(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_data(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_import_data_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.import_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.import_data() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.ImportDataRequest() + + +@pytest.mark.asyncio +async def test_import_data_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.import_data + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.import_data + ] = mock_rpc + + request = {} + await client.import_data(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.import_data(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_import_data_async( + transport: str = "grpc_asyncio", request_type=parallelstore.ImportDataRequest +): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.import_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = parallelstore.ImportDataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_import_data_async_from_dict(): + await test_import_data_async(request_type=dict) + + +def test_import_data_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.ImportDataRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.import_data), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.import_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_import_data_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.ImportDataRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_data), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.import_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.ExportDataRequest, + dict, + ], +) +def test_export_data(request_type, transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.export_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = parallelstore.ExportDataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_export_data_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.export_data() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.ExportDataRequest() + + +def test_export_data_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = parallelstore.ExportDataRequest( + name="name_value", + service_account="service_account_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.export_data(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.ExportDataRequest( + name="name_value", + service_account="service_account_value", + ) + + +def test_export_data_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.export_data in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.export_data] = mock_rpc + request = {} + client.export_data(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_data(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_export_data_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.export_data() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.ExportDataRequest() + + +@pytest.mark.asyncio +async def test_export_data_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.export_data + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.export_data + ] = mock_rpc + + request = {} + await 
client.export_data(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.export_data(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_export_data_async( + transport: str = "grpc_asyncio", request_type=parallelstore.ExportDataRequest +): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.export_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = parallelstore.ExportDataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_export_data_async_from_dict(): + await test_export_data_async(request_type=dict) + + +def test_export_data_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = parallelstore.ExportDataRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.export_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_export_data_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.ExportDataRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.export_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.ListInstancesRequest, + dict, + ], +) +def test_list_instances_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = parallelstore.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = parallelstore.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc + + request = {} + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_instances_rest_required_fields( + request_type=parallelstore.ListInstancesRequest, +): + transport_class = transports.ParallelstoreRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = parallelstore.ListInstancesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = parallelstore.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_instances(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_instances_rest_unset_required_fields(): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_instances._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instances_rest_interceptors(null_interceptor): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ParallelstoreRestInterceptor(), + ) + client = ParallelstoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + 
path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ParallelstoreRestInterceptor, "post_list_instances" + ) as post, mock.patch.object( + transports.ParallelstoreRestInterceptor, "pre_list_instances" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = parallelstore.ListInstancesRequest.pb( + parallelstore.ListInstancesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = parallelstore.ListInstancesResponse.to_json( + parallelstore.ListInstancesResponse() + ) + + request = parallelstore.ListInstancesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = parallelstore.ListInstancesResponse() + + client.list_instances( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_instances_rest_bad_request( + transport: str = "rest", request_type=parallelstore.ListInstancesRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_instances(request) + + +def test_list_instances_rest_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = parallelstore.ListInstancesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = parallelstore.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, + args[1], + ) + + +def test_list_instances_rest_flattened_error(transport: str = "rest"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + parallelstore.ListInstancesRequest(), + parent="parent_value", + ) + + +def test_list_instances_rest_pager(transport: str = "rest"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + parallelstore.Instance(), + ], + next_page_token="abc", + ), + parallelstore.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + ], + next_page_token="ghi", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + parallelstore.ListInstancesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + 
return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_instances(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, parallelstore.Instance) for i in results) + + pages = list(client.list_instances(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.GetInstanceRequest, + dict, + ], +) +def test_get_instance_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = parallelstore.Instance( + name="name_value", + description="description_value", + state=parallelstore.Instance.State.CREATING, + capacity_gib=1247, + daos_version="daos_version_value", + access_points=["access_points_value"], + network="network_value", + reserved_ip_range="reserved_ip_range_value", + effective_reserved_ip_range="effective_reserved_ip_range_value", + file_stripe_level=parallelstore.FileStripeLevel.FILE_STRIPE_LEVEL_MIN, + directory_stripe_level=parallelstore.DirectoryStripeLevel.DIRECTORY_STRIPE_LEVEL_MIN, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = parallelstore.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, parallelstore.Instance) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == parallelstore.Instance.State.CREATING + assert response.capacity_gib == 1247 + assert response.daos_version == "daos_version_value" + assert response.access_points == ["access_points_value"] + assert response.network == "network_value" + assert response.reserved_ip_range == "reserved_ip_range_value" + assert response.effective_reserved_ip_range == "effective_reserved_ip_range_value" + assert ( + response.file_stripe_level + == parallelstore.FileStripeLevel.FILE_STRIPE_LEVEL_MIN + ) + assert ( + response.directory_stripe_level + == parallelstore.DirectoryStripeLevel.DIRECTORY_STRIPE_LEVEL_MIN + ) + + +def test_get_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc + + request = {} + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_instance_rest_required_fields( + request_type=parallelstore.GetInstanceRequest, +): + transport_class = transports.ParallelstoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = parallelstore.Instance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = parallelstore.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_instance_rest_unset_required_fields(): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_rest_interceptors(null_interceptor): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ParallelstoreRestInterceptor(), + ) + client = ParallelstoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ParallelstoreRestInterceptor, "post_get_instance" + ) as post, mock.patch.object( + transports.ParallelstoreRestInterceptor, "pre_get_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
parallelstore.GetInstanceRequest.pb( + parallelstore.GetInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = parallelstore.Instance.to_json( + parallelstore.Instance() + ) + + request = parallelstore.GetInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = parallelstore.Instance() + + client.get_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_instance_rest_bad_request( + transport: str = "rest", request_type=parallelstore.GetInstanceRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_instance(request) + + +def test_get_instance_rest_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = parallelstore.Instance() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = parallelstore.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, + args[1], + ) + + +def test_get_instance_rest_flattened_error(transport: str = "rest"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_instance( + parallelstore.GetInstanceRequest(), + name="name_value", + ) + + +def test_get_instance_rest_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.CreateInstanceRequest, + dict, + ], +) +def test_create_instance_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["instance"] = { + "name": "name_value", + "description": "description_value", + "state": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "capacity_gib": 1247, + "daos_version": "daos_version_value", + "access_points": ["access_points_value1", "access_points_value2"], + "network": "network_value", + "reserved_ip_range": "reserved_ip_range_value", + "effective_reserved_ip_range": "effective_reserved_ip_range_value", + "file_stripe_level": 1, + "directory_stripe_level": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = parallelstore.CreateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del 
request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_instance(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc + + request = {} + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_instance_rest_required_fields( + request_type=parallelstore.CreateInstanceRequest, +): + transport_class = transports.ParallelstoreRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["instance_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "instanceId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == request_init["instance_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["instanceId"] = "instance_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "instance_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == "instance_id_value" + + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_instance(request) + + expected_params = [ + ( + "instanceId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_instance_rest_unset_required_fields(): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "instanceId", + "requestId", + ) + ) + & set( + ( + "parent", + "instanceId", + "instance", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_instance_rest_interceptors(null_interceptor): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ParallelstoreRestInterceptor(), + ) + client = ParallelstoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ParallelstoreRestInterceptor, "post_create_instance" + ) as post, mock.patch.object( + transports.ParallelstoreRestInterceptor, "pre_create_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = parallelstore.CreateInstanceRequest.pb( + 
parallelstore.CreateInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = parallelstore.CreateInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_instance_rest_bad_request( + transport: str = "rest", request_type=parallelstore.CreateInstanceRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_instance(request) + + +def test_create_instance_rest_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + instance=parallelstore.Instance(name="name_value"), + instance_id="instance_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, + args[1], + ) + + +def test_create_instance_rest_flattened_error(transport: str = "rest"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_instance( + parallelstore.CreateInstanceRequest(), + parent="parent_value", + instance=parallelstore.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +def test_create_instance_rest_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.UpdateInstanceRequest, + dict, + ], +) +def test_update_instance_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/instances/sample3", + "description": "description_value", + "state": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "capacity_gib": 1247, + "daos_version": "daos_version_value", + "access_points": ["access_points_value1", "access_points_value2"], + "network": "network_value", + "reserved_ip_range": "reserved_ip_range_value", + "effective_reserved_ip_range": "effective_reserved_ip_range_value", + "file_stripe_level": 1, + "directory_stripe_level": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = parallelstore.UpdateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del 
request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_instance(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc + + request = {} + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_instance_rest_required_fields( + request_type=parallelstore.UpdateInstanceRequest, +): + transport_class = transports.ParallelstoreRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_instance_rest_unset_required_fields(): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "instance", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_instance_rest_interceptors(null_interceptor): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ParallelstoreRestInterceptor(), + ) + client = ParallelstoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ParallelstoreRestInterceptor, "post_update_instance" + ) as post, mock.patch.object( + transports.ParallelstoreRestInterceptor, "pre_update_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = parallelstore.UpdateInstanceRequest.pb( + parallelstore.UpdateInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = parallelstore.UpdateInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_instance_rest_bad_request( + transport: str = "rest", request_type=parallelstore.UpdateInstanceRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_instance(request) + + +def test_update_instance_rest_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + instance=parallelstore.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{instance.name=projects/*/locations/*/instances/*}" + % client.transport._host, + args[1], + ) + + +def test_update_instance_rest_flattened_error(transport: str = "rest"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_instance( + parallelstore.UpdateInstanceRequest(), + instance=parallelstore.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_instance_rest_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.DeleteInstanceRequest, + dict, + ], +) +def test_delete_instance_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc + + request = {} + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_instance_rest_required_fields( + request_type=parallelstore.DeleteInstanceRequest, +): + transport_class = transports.ParallelstoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_instance_rest_unset_required_fields(): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instance_rest_interceptors(null_interceptor): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ParallelstoreRestInterceptor(), + ) + client = ParallelstoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + 
transports.ParallelstoreRestInterceptor, "post_delete_instance" + ) as post, mock.patch.object( + transports.ParallelstoreRestInterceptor, "pre_delete_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = parallelstore.DeleteInstanceRequest.pb( + parallelstore.DeleteInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = parallelstore.DeleteInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_instance_rest_bad_request( + transport: str = "rest", request_type=parallelstore.DeleteInstanceRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_instance(request) + + +def test_delete_instance_rest_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, + args[1], + ) + + +def test_delete_instance_rest_flattened_error(transport: str = "rest"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_instance( + parallelstore.DeleteInstanceRequest(), + name="name_value", + ) + + +def test_delete_instance_rest_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.ImportDataRequest, + dict, + ], +) +def test_import_data_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.import_data(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_import_data_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.import_data in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.import_data] = mock_rpc + + request = {} + client.import_data(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_data(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_import_data_rest_required_fields(request_type=parallelstore.ImportDataRequest): + transport_class = transports.ParallelstoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_data._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_data._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.import_data(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_import_data_rest_unset_required_fields(): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.import_data._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_import_data_rest_interceptors(null_interceptor): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ParallelstoreRestInterceptor(), + ) + client = ParallelstoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + 
transports.ParallelstoreRestInterceptor, "post_import_data" + ) as post, mock.patch.object( + transports.ParallelstoreRestInterceptor, "pre_import_data" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = parallelstore.ImportDataRequest.pb( + parallelstore.ImportDataRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = parallelstore.ImportDataRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.import_data( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_import_data_rest_bad_request( + transport: str = "rest", request_type=parallelstore.ImportDataRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.import_data(request) + + +def test_import_data_rest_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.ExportDataRequest, + dict, + ], +) +def test_export_data_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.export_data(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_export_data_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.export_data in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.export_data] = mock_rpc + + request = {} + client.export_data(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_data(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_export_data_rest_required_fields(request_type=parallelstore.ExportDataRequest): + transport_class = transports.ParallelstoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_data._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_data._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.export_data(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_export_data_rest_unset_required_fields(): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.export_data._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_data_rest_interceptors(null_interceptor): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ParallelstoreRestInterceptor(), + ) + client = ParallelstoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + 
transports.ParallelstoreRestInterceptor, "post_export_data" + ) as post, mock.patch.object( + transports.ParallelstoreRestInterceptor, "pre_export_data" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = parallelstore.ExportDataRequest.pb( + parallelstore.ExportDataRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = parallelstore.ExportDataRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.export_data( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_export_data_rest_bad_request( + transport: str = "rest", request_type=parallelstore.ExportDataRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.export_data(request) + + +def test_export_data_rest_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ParallelstoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ParallelstoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ParallelstoreClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ParallelstoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ParallelstoreClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ParallelstoreClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.ParallelstoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ParallelstoreClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ParallelstoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ParallelstoreClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ParallelstoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ParallelstoreGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ParallelstoreGrpcTransport, + transports.ParallelstoreGrpcAsyncIOTransport, + transports.ParallelstoreRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ParallelstoreClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ParallelstoreGrpcTransport, + ) + + +def test_parallelstore_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ParallelstoreTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_parallelstore_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.parallelstore_v1.services.parallelstore.transports.ParallelstoreTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ParallelstoreTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_instances", + "get_instance", + "create_instance", + "update_instance", + "delete_instance", + "import_data", + "export_data", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_parallelstore_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( 
+ "google.cloud.parallelstore_v1.services.parallelstore.transports.ParallelstoreTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ParallelstoreTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_parallelstore_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.parallelstore_v1.services.parallelstore.transports.ParallelstoreTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ParallelstoreTransport() + adc.assert_called_once() + + +def test_parallelstore_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ParallelstoreClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ParallelstoreGrpcTransport, + transports.ParallelstoreGrpcAsyncIOTransport, + ], +) +def test_parallelstore_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ParallelstoreGrpcTransport, + transports.ParallelstoreGrpcAsyncIOTransport, + transports.ParallelstoreRestTransport, + ], +) +def test_parallelstore_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ParallelstoreGrpcTransport, grpc_helpers), + (transports.ParallelstoreGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_parallelstore_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "parallelstore.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="parallelstore.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ParallelstoreGrpcTransport, + transports.ParallelstoreGrpcAsyncIOTransport, + ], +) +def test_parallelstore_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_parallelstore_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ParallelstoreRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_parallelstore_rest_lro_client(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_parallelstore_host_no_port(transport_name): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="parallelstore.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "parallelstore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://parallelstore.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_parallelstore_host_with_port(transport_name): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="parallelstore.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "parallelstore.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://parallelstore.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_parallelstore_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ParallelstoreClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ParallelstoreClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_instances._session + session2 = client2.transport.list_instances._session + assert session1 != session2 + session1 = client1.transport.get_instance._session + session2 = client2.transport.get_instance._session + assert session1 != session2 + session1 = client1.transport.create_instance._session + session2 = client2.transport.create_instance._session + assert session1 != 
session2 + session1 = client1.transport.update_instance._session + session2 = client2.transport.update_instance._session + assert session1 != session2 + session1 = client1.transport.delete_instance._session + session2 = client2.transport.delete_instance._session + assert session1 != session2 + session1 = client1.transport.import_data._session + session2 = client2.transport.import_data._session + assert session1 != session2 + session1 = client1.transport.export_data._session + session2 = client2.transport.export_data._session + assert session1 != session2 + + +def test_parallelstore_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ParallelstoreGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_parallelstore_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ParallelstoreGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ParallelstoreGrpcTransport, + transports.ParallelstoreGrpcAsyncIOTransport, + ], +) +def test_parallelstore_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ParallelstoreGrpcTransport, + transports.ParallelstoreGrpcAsyncIOTransport, + ], +) +def test_parallelstore_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_parallelstore_grpc_lro_client(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_parallelstore_grpc_lro_async_client(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_address_path(): + project = "squid" + region = "clam" + address = "whelk" + expected = "projects/{project}/regions/{region}/addresses/{address}".format( + project=project, + region=region, + address=address, + ) + actual = ParallelstoreClient.address_path(project, region, address) + assert expected == actual + + +def test_parse_address_path(): + expected = { + "project": "octopus", + "region": "oyster", + "address": "nudibranch", + } + path = ParallelstoreClient.address_path(**expected) + + # Check that the path construction is reversible. + actual = ParallelstoreClient.parse_address_path(path) + assert expected == actual + + +def test_instance_path(): + project = "cuttlefish" + location = "mussel" + instance = "winkle" + expected = "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, + location=location, + instance=instance, + ) + actual = ParallelstoreClient.instance_path(project, location, instance) + assert expected == actual + + +def test_parse_instance_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "instance": "abalone", + } + path = ParallelstoreClient.instance_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ParallelstoreClient.parse_instance_path(path) + assert expected == actual + + +def test_network_path(): + project = "squid" + network = "clam" + expected = "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + actual = ParallelstoreClient.network_path(project, network) + assert expected == actual + + +def test_parse_network_path(): + expected = { + "project": "whelk", + "network": "octopus", + } + path = ParallelstoreClient.network_path(**expected) + + # Check that the path construction is reversible. + actual = ParallelstoreClient.parse_network_path(path) + assert expected == actual + + +def test_service_account_path(): + project = "oyster" + service_account = "nudibranch" + expected = "projects/{project}/serviceAccounts/{service_account}".format( + project=project, + service_account=service_account, + ) + actual = ParallelstoreClient.service_account_path(project, service_account) + assert expected == actual + + +def test_parse_service_account_path(): + expected = { + "project": "cuttlefish", + "service_account": "mussel", + } + path = ParallelstoreClient.service_account_path(**expected) + + # Check that the path construction is reversible. + actual = ParallelstoreClient.parse_service_account_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ParallelstoreClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = ParallelstoreClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ParallelstoreClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ParallelstoreClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = ParallelstoreClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ParallelstoreClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ParallelstoreClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = ParallelstoreClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ParallelstoreClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format( + project=project, + ) + actual = ParallelstoreClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = ParallelstoreClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ParallelstoreClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ParallelstoreClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = ParallelstoreClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ParallelstoreClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ParallelstoreTransport, "_prep_wrapped_messages" + ) as prep: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ParallelstoreTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ParallelstoreClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + 
request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = ParallelstoreClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = ParallelstoreAsyncClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +def test_get_location_from_dict(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_location_from_dict_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ParallelstoreClient, transports.ParallelstoreGrpcTransport), + (ParallelstoreAsyncClient, transports.ParallelstoreGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From 052585c63dfa172b7f88fdb5882eda446fc47bfe Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 11:26:14 -0400 Subject: [PATCH 44/59] feat: [google-cloud-bigquery-datatransfer]Add scheduleOptionsV2 and Error fields for TransferConfig (#13116) BEGIN_COMMIT_OVERRIDE feat:Add scheduleOptionsV2 and Error fields for TransferConfig END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 680586383 Source-Link: https://github.com/googleapis/googleapis/commit/463b5a6b06e20504fb44bfedff59ba05b42bf0b2 Source-Link: https://github.com/googleapis/googleapis-gen/commit/68d602fca86cfbf7653612f50c5cf9e3105065c9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJpZ3F1ZXJ5LWRhdGF0cmFuc2Zlci8uT3dsQm90LnlhbWwiLCJoIjoiNjhkNjAyZmNhODZjZmJmNzY1MzYxMmY1MGM1Y2Y5ZTMxMDUwNjVjOSJ9 --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../cloud/bigquery_datatransfer/__init__.py | 8 + .../bigquery_datatransfer/gapic_version.py | 2 +- .../bigquery_datatransfer_v1/__init__.py | 8 + .../bigquery_datatransfer_v1/gapic_version.py | 2 +- .../types/__init__.py | 8 + .../types/transfer.py | 147 ++++++++++++++++++ ...google.cloud.bigquery.datatransfer.v1.json | 2 +- .../test_data_transfer_service.py | 43 +++++ 8 files changed, 217 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/__init__.py index c2cce6e9d9e7..bb51c87c8254 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/__init__.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/__init__.py @@ -54,7 +54,11 @@ from google.cloud.bigquery_datatransfer_v1.types.transfer import ( EmailPreferences, EncryptionConfiguration, + EventDrivenSchedule, + ManualSchedule, ScheduleOptions, + ScheduleOptionsV2, + TimeBasedSchedule, TransferConfig, TransferMessage, TransferRun, @@ -93,7 +97,11 @@ "UpdateTransferConfigRequest", "EmailPreferences", "EncryptionConfiguration", + "EventDrivenSchedule", + "ManualSchedule", "ScheduleOptions", + "ScheduleOptionsV2", + "TimeBasedSchedule", "TransferConfig", "TransferMessage", "TransferRun", diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py 
b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py index fc64b41dd679..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.15.7" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py index 41ad09552699..7df301ab3c59 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py @@ -52,7 +52,11 @@ from .types.transfer import ( EmailPreferences, EncryptionConfiguration, + EventDrivenSchedule, + ManualSchedule, ScheduleOptions, + ScheduleOptionsV2, + TimeBasedSchedule, TransferConfig, TransferMessage, TransferRun, @@ -74,6 +78,7 @@ "EmailPreferences", "EncryptionConfiguration", "EnrollDataSourcesRequest", + "EventDrivenSchedule", "GetDataSourceRequest", "GetTransferConfigRequest", "GetTransferRunRequest", @@ -85,11 +90,14 @@ "ListTransferLogsResponse", "ListTransferRunsRequest", "ListTransferRunsResponse", + "ManualSchedule", "ScheduleOptions", + "ScheduleOptionsV2", "ScheduleTransferRunsRequest", "ScheduleTransferRunsResponse", "StartManualTransferRunsRequest", "StartManualTransferRunsResponse", + "TimeBasedSchedule", "TransferConfig", "TransferMessage", "TransferRun", diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py 
b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py index fc64b41dd679..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.15.7" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/__init__.py index 2caa0e24a50d..f704ac5f758d 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/__init__.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/__init__.py @@ -43,7 +43,11 @@ from .transfer import ( EmailPreferences, EncryptionConfiguration, + EventDrivenSchedule, + ManualSchedule, ScheduleOptions, + ScheduleOptionsV2, + TimeBasedSchedule, TransferConfig, TransferMessage, TransferRun, @@ -80,7 +84,11 @@ "UpdateTransferConfigRequest", "EmailPreferences", "EncryptionConfiguration", + "EventDrivenSchedule", + "ManualSchedule", "ScheduleOptions", + "ScheduleOptionsV2", + "TimeBasedSchedule", "TransferConfig", "TransferMessage", "TransferRun", diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/transfer.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/transfer.py index 4403154949f8..bd37dfdec84b 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/transfer.py +++ 
b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/transfer.py
@@ -30,6 +30,10 @@
     "TransferState",
     "EmailPreferences",
     "ScheduleOptions",
+    "ScheduleOptionsV2",
+    "TimeBasedSchedule",
+    "ManualSchedule",
+    "EventDrivenSchedule",
     "UserInfo",
     "TransferConfig",
     "EncryptionConfiguration",
@@ -144,6 +148,130 @@ class ScheduleOptions(proto.Message):
     )
 
 
+class ScheduleOptionsV2(proto.Message):
+    r"""V2 options customizing different types of data transfer
+    schedule. This field supports existing time-based and manual
+    transfer schedule. Also supports Event-Driven transfer schedule.
+    ScheduleOptionsV2 cannot be used together with
+    ScheduleOptions/Schedule.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        time_based_schedule (google.cloud.bigquery_datatransfer_v1.types.TimeBasedSchedule):
+            Time based transfer schedule options. This is
+            the default schedule option.
+
+            This field is a member of `oneof`_ ``schedule``.
+        manual_schedule (google.cloud.bigquery_datatransfer_v1.types.ManualSchedule):
+            Manual transfer schedule. If set, the transfer run will not
+            be auto-scheduled by the system, unless the client invokes
+            StartManualTransferRuns. This is equivalent to
+            disable_auto_scheduling = true.
+
+            This field is a member of `oneof`_ ``schedule``.
+        event_driven_schedule (google.cloud.bigquery_datatransfer_v1.types.EventDrivenSchedule):
+            Event driven transfer schedule options. If
+            set, the transfer will be scheduled upon events
+            arrival.
+
+            This field is a member of `oneof`_ ``schedule``.
+ """ + + time_based_schedule: "TimeBasedSchedule" = proto.Field( + proto.MESSAGE, + number=1, + oneof="schedule", + message="TimeBasedSchedule", + ) + manual_schedule: "ManualSchedule" = proto.Field( + proto.MESSAGE, + number=2, + oneof="schedule", + message="ManualSchedule", + ) + event_driven_schedule: "EventDrivenSchedule" = proto.Field( + proto.MESSAGE, + number=3, + oneof="schedule", + message="EventDrivenSchedule", + ) + + +class TimeBasedSchedule(proto.Message): + r"""Options customizing the time based transfer schedule. + Options are migrated from the original ScheduleOptions message. + + Attributes: + schedule (str): + Data transfer schedule. If the data source does not support + a custom schedule, this should be empty. If it is empty, the + default value for the data source will be used. The + specified times are in UTC. Examples of valid format: + ``1st,3rd monday of month 15:30``, + ``every wed,fri of jan,jun 13:15``, and + ``first sunday of quarter 00:00``. See more explanation + about the format here: + https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format + + NOTE: The minimum interval time between recurring transfers + depends on the data source; refer to the documentation for + your data source. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Specifies time to start scheduling transfer + runs. The first run will be scheduled at or + after the start time according to a recurrence + pattern defined in the schedule string. The + start time can be changed at any moment. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Defines time to stop scheduling transfer + runs. A transfer run cannot be scheduled at or + after the end time. The end time can be changed + at any moment. 
+ """ + + schedule: str = proto.Field( + proto.STRING, + number=1, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class ManualSchedule(proto.Message): + r"""Options customizing manual transfers schedule.""" + + +class EventDrivenSchedule(proto.Message): + r"""Options customizing EventDriven transfers schedule. + + Attributes: + pubsub_subscription (str): + Pub/Sub subscription name used to receive + events. Only Google Cloud Storage data source + support this option. Format: + projects/{project}/subscriptions/{subscription} + """ + + pubsub_subscription: str = proto.Field( + proto.STRING, + number=1, + ) + + class UserInfo(proto.Message): r"""Information about a user. @@ -222,6 +350,11 @@ class TransferConfig(proto.Message): schedule_options (google.cloud.bigquery_datatransfer_v1.types.ScheduleOptions): Options customizing the data transfer schedule. + schedule_options_v2 (google.cloud.bigquery_datatransfer_v1.types.ScheduleOptionsV2): + Options customizing different types of data transfer + schedule. This field replaces "schedule" and + "schedule_options" fields. ScheduleOptionsV2 cannot be used + together with ScheduleOptions/Schedule. data_refresh_window_days (int): The number of days to look back to automatically refresh the data. For example, if ``data_refresh_window_days = 10``, @@ -274,6 +407,10 @@ class TransferConfig(proto.Message): effect. Write methods will apply the key if it is present, or otherwise try to apply project default keys if it is absent. + error (google.rpc.status_pb2.Status): + Output only. Error code with detailed + information about reason of the latest config + failure. 
""" name: str = proto.Field( @@ -307,6 +444,11 @@ class TransferConfig(proto.Message): number=24, message="ScheduleOptions", ) + schedule_options_v2: "ScheduleOptionsV2" = proto.Field( + proto.MESSAGE, + number=31, + message="ScheduleOptionsV2", + ) data_refresh_window_days: int = proto.Field( proto.INT32, number=12, @@ -358,6 +500,11 @@ class TransferConfig(proto.Message): number=28, message="EncryptionConfiguration", ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=32, + message=status_pb2.Status, + ) class EncryptionConfiguration(proto.Message): diff --git a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json index da58d7e46817..adc8c281da8f 100644 --- a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json +++ b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datatransfer", - "version": "3.15.7" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py index 527a6e81160e..f144355cd636 100644 --- a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py +++ b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py @@ -35,6 +35,7 @@ from google.auth.exceptions import MutualTLSChannelError from google.cloud.location import locations_pb2 from google.oauth2 import 
service_account +from google.protobuf import any_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format @@ -8880,6 +8881,17 @@ def test_create_transfer_config_rest(request_type): "start_time": {"seconds": 751, "nanos": 543}, "end_time": {}, }, + "schedule_options_v2": { + "time_based_schedule": { + "schedule": "schedule_value", + "start_time": {}, + "end_time": {}, + }, + "manual_schedule": {}, + "event_driven_schedule": { + "pubsub_subscription": "pubsub_subscription_value" + }, + }, "data_refresh_window_days": 2543, "disabled": True, "update_time": {}, @@ -8891,6 +8903,16 @@ def test_create_transfer_config_rest(request_type): "email_preferences": {"enable_failure_email": True}, "owner_info": {"email": "email_value"}, "encryption_configuration": {"kms_key_name": {"value": "value_value"}}, + "error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -9327,6 +9349,17 @@ def test_update_transfer_config_rest(request_type): "start_time": {"seconds": 751, "nanos": 543}, "end_time": {}, }, + "schedule_options_v2": { + "time_based_schedule": { + "schedule": "schedule_value", + "start_time": {}, + "end_time": {}, + }, + "manual_schedule": {}, + "event_driven_schedule": { + "pubsub_subscription": "pubsub_subscription_value" + }, + }, "data_refresh_window_days": 2543, "disabled": True, "update_time": {}, @@ -9338,6 +9371,16 @@ def test_update_transfer_config_rest(request_type): "email_preferences": {"enable_failure_email": True}, "owner_info": {"email": "email_value"}, "encryption_configuration": {"kms_key_name": {"value": "value_value"}}, + "error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency From 8d79ca81a3f2f01a1f0c77231e77566860f1d4ab Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 11:52:13 -0400 Subject: [PATCH 45/59] feat: [google-shopping-merchant-accounts] add 'force' parameter for accounts.delete method (#13111) BEGIN_COMMIT_OVERRIDE feat: add 'force' parameter for accounts.delete method docs: updated descriptions for the DeleteAccount and ListAccounts RPCs fix!: The type of an existing field `time_zone` is changed from `message` to `string` in message `.google.shopping.merchant.accounts.v1beta.ListAccountIssuesRequest` fix!: An existing field `account_aggregation` is removed from message `.google.shopping.merchant.accounts.v1beta.CreateAndConfigureAccountRequest` fix!: Changed field behavior for an existing field `service` in message `.google.shopping.merchant.accounts.v1beta.CreateAndConfigureAccountRequest` fix!: Changed field behavior for an existing field `region_code` in message `.google.shopping.merchant.accounts.v1beta.RetrieveLatestTermsOfServiceRequest` fix!: Changed field behavior for an existing field `kind` in message `.google.shopping.merchant.accounts.v1beta.RetrieveLatestTermsOfServiceRequest` feat: A new field `account_aggregation` is added to message `.google.shopping.merchant.accounts.v1beta.CreateAndConfigureAccountRequest` feat: A new message `AccountAggregation` is added feat: A new service `AutofeedSettingsService` is added feat: A new message `AutofeedSettings` is added feat: A new resource_definition `[merchantapi.googleapis.com/AutofeedSettings](https://www.google.com/url?sa=D&q=http%3A%2F%2Fmerchantapi.googleapis.com%2FAutofeedSettings)` is added feat: A new message `GetAutofeedSettingsRequest` is added feat: A new message `UpdateAutofeedSettingsRequest` is added feat: A new field `korean_business_registration_number` is added to message 
`.google.shopping.merchant.accounts.v1beta.BusinessInfo` END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. docs: updated descriptions for the DeleteAccount and ListAccounts RPCs PiperOrigin-RevId: 680468173 Source-Link: https://github.com/googleapis/googleapis/commit/1b2f804bf43253118ff0e56f9524979265afdfe6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/aaf00b070e000374e0f531319e4b7dd797de3165 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLW1lcmNoYW50LWFjY291bnRzLy5Pd2xCb3QueWFtbCIsImgiOiJhYWYwMGIwNzBlMDAwMzc0ZTBmNTMxMzE5ZTRiN2RkNzk3ZGUzMTY1In0= BEGIN_NESTED_COMMIT fix!: [google-shopping-merchant-accounts] The type of an existing field `time_zone` is changed from `message` to `string` in message `.google.shopping.merchant.accounts.v1beta.ListAccountIssuesRequest` fix!: An existing field `account_aggregation` is removed from message `.google.shopping.merchant.accounts.v1beta.CreateAndConfigureAccountRequest` fix!: Changed field behavior for an existing field `service` in message `.google.shopping.merchant.accounts.v1beta.CreateAndConfigureAccountRequest` fix!: Changed field behavior for an existing field `region_code` in message `.google.shopping.merchant.accounts.v1beta.RetrieveLatestTermsOfServiceRequest` fix!: Changed field behavior for an existing field `kind` in message `.google.shopping.merchant.accounts.v1beta.RetrieveLatestTermsOfServiceRequest` feat: A new field `account_aggregation` is added to message `.google.shopping.merchant.accounts.v1beta.CreateAndConfigureAccountRequest` feat: A new message `AccountAggregation` is added feat: A new service `AutofeedSettingsService` is added feat: A new message `AutofeedSettings` is added feat: A new resource_definition `merchantapi.googleapis.com/AutofeedSettings` is added feat: A new message `GetAutofeedSettingsRequest` is added feat: A new message `UpdateAutofeedSettingsRequest` is added feat: A new field `korean_business_registration_number` is added to message 
`.google.shopping.merchant.accounts.v1beta.BusinessInfo` PiperOrigin-RevId: 678841094 Source-Link: https://github.com/googleapis/googleapis/commit/005df4681b89bd204a90b76168a6dc9d9e7bf4fe Source-Link: https://github.com/googleapis/googleapis-gen/commit/1c58da100531d09e9123331d121f410e7d00e4aa Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLW1lcmNoYW50LWFjY291bnRzLy5Pd2xCb3QueWFtbCIsImgiOiIxYzU4ZGExMDA1MzFkMDllOTEyMzMzMWQxMjFmNDEwZTdkMDBlNGFhIn0= END_NESTED_COMMIT --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr Co-authored-by: ohmayr --- .../autofeed_settings_service.rst | 6 + .../merchant_accounts_v1beta/services_.rst | 1 + .../shopping/merchant_accounts/__init__.py | 20 + .../merchant_accounts_v1beta/__init__.py | 16 + .../gapic_metadata.json | 49 + .../services/accounts_service/async_client.py | 13 +- .../services/accounts_service/client.py | 13 +- .../accounts_service/transports/grpc.py | 13 +- .../transports/grpc_asyncio.py | 13 +- .../autofeed_settings_service/__init__.py | 22 + .../autofeed_settings_service/async_client.py | 519 +++ .../autofeed_settings_service/client.py | 931 +++++ .../transports/__init__.py | 41 + .../transports/base.py | 181 + .../transports/grpc.py | 304 ++ .../transports/grpc_asyncio.py | 320 ++ .../transports/rest.py | 454 +++ .../email_preferences_service/async_client.py | 4 +- .../email_preferences_service/client.py | 4 +- .../async_client.py | 6 +- .../online_return_policy_service/client.py | 6 +- .../shipping_settings_service/async_client.py | 4 +- .../shipping_settings_service/client.py | 4 +- .../transports/rest.py | 8 +- .../async_client.py | 2 + .../client.py | 2 + .../terms_of_service_service/async_client.py | 2 + .../terms_of_service_service/client.py | 2 + .../transports/rest.py | 14 + .../types/__init__.py | 10 + .../types/accountissue.py | 8 +- .../types/accounts.py | 44 +- .../types/accountservices.py | 34 + .../types/autofeedsettings.py | 109 + .../types/businessinfo.py | 11 + .../types/shippingsettings.py | 
27 +- .../types/termsofservice.py | 6 +- .../types/termsofserviceagreementstate.py | 3 + ...ngs_service_get_autofeed_settings_async.py | 52 + ...ings_service_get_autofeed_settings_sync.py | 52 + ..._service_update_autofeed_settings_async.py | 55 + ...s_service_update_autofeed_settings_sync.py | 55 + ..._retrieve_latest_terms_of_service_async.py | 2 + ...e_retrieve_latest_terms_of_service_sync.py | 2 + ...gle.shopping.merchant.accounts.v1beta.json | 358 +- ...fixup_merchant_accounts_v1beta_keywords.py | 6 +- .../test_account_issue_service.py | 3 +- .../test_accounts_service.py | 22 +- .../test_autofeed_settings_service.py | 3474 +++++++++++++++++ .../test_business_info_service.py | 33 + .../test_terms_of_service_service.py | 114 + 51 files changed, 7363 insertions(+), 91 deletions(-) create mode 100644 packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/autofeed_settings_service.rst create mode 100644 packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/__init__.py create mode 100644 packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/async_client.py create mode 100644 packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/client.py create mode 100644 packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/__init__.py create mode 100644 packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/base.py create mode 100644 packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/grpc.py create mode 100644 
packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/grpc_asyncio.py create mode 100644 packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/rest.py create mode 100644 packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accountservices.py create mode 100644 packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/autofeedsettings.py create mode 100644 packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_async.py create mode 100644 packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_sync.py create mode 100644 packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_async.py create mode 100644 packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_sync.py create mode 100644 packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_autofeed_settings_service.py diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/autofeed_settings_service.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/autofeed_settings_service.rst new file mode 100644 index 000000000000..7d16dfb96a4b --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/autofeed_settings_service.rst @@ -0,0 +1,6 @@ +AutofeedSettingsService +----------------------------------------- + +.. 
automodule:: google.shopping.merchant_accounts_v1beta.services.autofeed_settings_service + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/services_.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/services_.rst index 1c502de92ff3..b9c209efb0f7 100644 --- a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/services_.rst +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/services_.rst @@ -6,6 +6,7 @@ Services for Google Shopping Merchant Accounts v1beta API account_issue_service accounts_service account_tax_service + autofeed_settings_service business_identity_service business_info_service email_preferences_service diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/__init__.py index 97bb9de71cdf..2af1a6d2c96c 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/__init__.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/__init__.py @@ -36,6 +36,12 @@ from google.shopping.merchant_accounts_v1beta.services.accounts_service.client import ( AccountsServiceClient, ) +from google.shopping.merchant_accounts_v1beta.services.autofeed_settings_service.async_client import ( + AutofeedSettingsServiceAsyncClient, +) +from google.shopping.merchant_accounts_v1beta.services.autofeed_settings_service.client import ( + AutofeedSettingsServiceClient, +) from google.shopping.merchant_accounts_v1beta.services.business_identity_service.async_client import ( BusinessIdentityServiceAsyncClient, ) @@ -126,6 +132,14 @@ ListSubAccountsResponse, UpdateAccountRequest, ) +from google.shopping.merchant_accounts_v1beta.types.accountservices import ( + AccountAggregation, +) +from google.shopping.merchant_accounts_v1beta.types.autofeedsettings import ( 
+ AutofeedSettings, + GetAutofeedSettingsRequest, + UpdateAutofeedSettingsRequest, +) from google.shopping.merchant_accounts_v1beta.types.businessidentity import ( BusinessIdentity, GetBusinessIdentityRequest, @@ -234,6 +248,8 @@ "AccountsServiceAsyncClient", "AccountTaxServiceClient", "AccountTaxServiceAsyncClient", + "AutofeedSettingsServiceClient", + "AutofeedSettingsServiceAsyncClient", "BusinessIdentityServiceClient", "BusinessIdentityServiceAsyncClient", "BusinessInfoServiceClient", @@ -274,6 +290,10 @@ "ListSubAccountsRequest", "ListSubAccountsResponse", "UpdateAccountRequest", + "AccountAggregation", + "AutofeedSettings", + "GetAutofeedSettingsRequest", + "UpdateAutofeedSettingsRequest", "BusinessIdentity", "GetBusinessIdentityRequest", "UpdateBusinessIdentityRequest", diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/__init__.py index 645cb37080cd..fd3bbce8a284 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/__init__.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/__init__.py @@ -27,6 +27,10 @@ AccountTaxServiceClient, ) from .services.accounts_service import AccountsServiceAsyncClient, AccountsServiceClient +from .services.autofeed_settings_service import ( + AutofeedSettingsServiceAsyncClient, + AutofeedSettingsServiceClient, +) from .services.business_identity_service import ( BusinessIdentityServiceAsyncClient, BusinessIdentityServiceClient, @@ -83,6 +87,12 @@ ListSubAccountsResponse, UpdateAccountRequest, ) +from .types.accountservices import AccountAggregation +from .types.autofeedsettings import ( + AutofeedSettings, + GetAutofeedSettingsRequest, + UpdateAutofeedSettingsRequest, +) from .types.businessidentity import ( BusinessIdentity, GetBusinessIdentityRequest, @@ -182,6 +192,7 @@ 
"AccountIssueServiceAsyncClient", "AccountTaxServiceAsyncClient", "AccountsServiceAsyncClient", + "AutofeedSettingsServiceAsyncClient", "BusinessIdentityServiceAsyncClient", "BusinessInfoServiceAsyncClient", "EmailPreferencesServiceAsyncClient", @@ -197,12 +208,15 @@ "Accepted", "AccessRight", "Account", + "AccountAggregation", "AccountIssue", "AccountIssueServiceClient", "AccountTax", "AccountTaxServiceClient", "AccountsServiceClient", "Address", + "AutofeedSettings", + "AutofeedSettingsServiceClient", "BusinessDayConfig", "BusinessIdentity", "BusinessIdentityServiceClient", @@ -226,6 +240,7 @@ "EnableProgramRequest", "GetAccountRequest", "GetAccountTaxRequest", + "GetAutofeedSettingsRequest", "GetBusinessIdentityRequest", "GetBusinessInfoRequest", "GetEmailPreferencesRequest", @@ -285,6 +300,7 @@ "UnclaimHomepageRequest", "UpdateAccountRequest", "UpdateAccountTaxRequest", + "UpdateAutofeedSettingsRequest", "UpdateBusinessIdentityRequest", "UpdateBusinessInfoRequest", "UpdateEmailPreferencesRequest", diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/gapic_metadata.json b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/gapic_metadata.json index 3823aac9f4fa..2e91ad70f4fb 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/gapic_metadata.json +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/gapic_metadata.json @@ -212,6 +212,55 @@ } } }, + "AutofeedSettingsService": { + "clients": { + "grpc": { + "libraryClient": "AutofeedSettingsServiceClient", + "rpcs": { + "GetAutofeedSettings": { + "methods": [ + "get_autofeed_settings" + ] + }, + "UpdateAutofeedSettings": { + "methods": [ + "update_autofeed_settings" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AutofeedSettingsServiceAsyncClient", + "rpcs": { + "GetAutofeedSettings": { + "methods": [ + "get_autofeed_settings" + ] + }, + 
"UpdateAutofeedSettings": { + "methods": [ + "update_autofeed_settings" + ] + } + } + }, + "rest": { + "libraryClient": "AutofeedSettingsServiceClient", + "rpcs": { + "GetAutofeedSettings": { + "methods": [ + "get_autofeed_settings" + ] + }, + "UpdateAutofeedSettings": { + "methods": [ + "update_autofeed_settings" + ] + } + } + } + } + }, "BusinessIdentityService": { "clients": { "grpc": { diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/async_client.py index 2b9a05d02cc8..7a39ba6829d3 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/async_client.py @@ -465,9 +465,12 @@ async def delete_account( metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes the specified account regardless of its type: - standalone, MCA or sub-account. Deleting an MCA leads to - the deletion of all of its sub-accounts. Executing this - method requires admin access. + standalone, MCA or sub-account. Deleting an MCA leads to the + deletion of all of its sub-accounts. Executing this method + requires admin access. The deletion succeeds only if the account + does not provide services to any other account and has no + processed offers. You can use the ``force`` parameter to + override this. .. code-block:: python @@ -685,7 +688,9 @@ async def list_accounts( size or filters. This is not just listing the sub-accounts of an MCA, but all accounts the calling user has access to including other MCAs, linked - accounts, standalone accounts and so on. + accounts, standalone accounts and so on. If no filter is + provided, then it returns accounts the user is directly + added to. .. 
code-block:: python diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/client.py index b5bb3423a3df..12d7a2732205 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/client.py @@ -904,9 +904,12 @@ def delete_account( metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes the specified account regardless of its type: - standalone, MCA or sub-account. Deleting an MCA leads to - the deletion of all of its sub-accounts. Executing this - method requires admin access. + standalone, MCA or sub-account. Deleting an MCA leads to the + deletion of all of its sub-accounts. Executing this method + requires admin access. The deletion succeeds only if the account + does not provide services to any other account and has no + processed offers. You can use the ``force`` parameter to + override this. .. code-block:: python @@ -1118,7 +1121,9 @@ def list_accounts( size or filters. This is not just listing the sub-accounts of an MCA, but all accounts the calling user has access to including other MCAs, linked - accounts, standalone accounts and so on. + accounts, standalone accounts and so on. If no filter is + provided, then it returns accounts the user is directly + added to. .. 
code-block:: python diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc.py index 9ddc7ec8854e..c2c0d8c4abc7 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc.py @@ -297,9 +297,12 @@ def delete_account( r"""Return a callable for the delete account method over gRPC. Deletes the specified account regardless of its type: - standalone, MCA or sub-account. Deleting an MCA leads to - the deletion of all of its sub-accounts. Executing this - method requires admin access. + standalone, MCA or sub-account. Deleting an MCA leads to the + deletion of all of its sub-accounts. Executing this method + requires admin access. The deletion succeeds only if the account + does not provide services to any other account and has no + processed offers. You can use the ``force`` parameter to + override this. Returns: Callable[[~.DeleteAccountRequest], @@ -358,7 +361,9 @@ def list_accounts( size or filters. This is not just listing the sub-accounts of an MCA, but all accounts the calling user has access to including other MCAs, linked - accounts, standalone accounts and so on. + accounts, standalone accounts and so on. If no filter is + provided, then it returns accounts the user is directly + added to. 
Returns: Callable[[~.ListAccountsRequest], diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc_asyncio.py index 391cc69c72fe..a90a29d80462 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc_asyncio.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc_asyncio.py @@ -305,9 +305,12 @@ def delete_account( r"""Return a callable for the delete account method over gRPC. Deletes the specified account regardless of its type: - standalone, MCA or sub-account. Deleting an MCA leads to - the deletion of all of its sub-accounts. Executing this - method requires admin access. + standalone, MCA or sub-account. Deleting an MCA leads to the + deletion of all of its sub-accounts. Executing this method + requires admin access. The deletion succeeds only if the account + does not provide services to any other account and has no + processed offers. You can use the ``force`` parameter to + override this. Returns: Callable[[~.DeleteAccountRequest], @@ -368,7 +371,9 @@ def list_accounts( size or filters. This is not just listing the sub-accounts of an MCA, but all accounts the calling user has access to including other MCAs, linked - accounts, standalone accounts and so on. + accounts, standalone accounts and so on. If no filter is + provided, then it returns accounts the user is directly + added to. 
Returns: Callable[[~.ListAccountsRequest], diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/__init__.py new file mode 100644 index 000000000000..b4e4ddd5e568 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import AutofeedSettingsServiceAsyncClient +from .client import AutofeedSettingsServiceClient + +__all__ = ( + "AutofeedSettingsServiceClient", + "AutofeedSettingsServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/async_client.py new file mode 100644 index 000000000000..e01855825f41 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/async_client.py @@ -0,0 +1,519 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import autofeedsettings + +from .client import AutofeedSettingsServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, AutofeedSettingsServiceTransport +from .transports.grpc_asyncio import AutofeedSettingsServiceGrpcAsyncIOTransport + + +class AutofeedSettingsServiceAsyncClient: + """Service to support + `autofeed `__ + setting. + """ + + _client: AutofeedSettingsServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = AutofeedSettingsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AutofeedSettingsServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + AutofeedSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = AutofeedSettingsServiceClient._DEFAULT_UNIVERSE + + autofeed_settings_path = staticmethod( + AutofeedSettingsServiceClient.autofeed_settings_path + ) + parse_autofeed_settings_path = staticmethod( + AutofeedSettingsServiceClient.parse_autofeed_settings_path + ) + common_billing_account_path = staticmethod( + AutofeedSettingsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AutofeedSettingsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(AutofeedSettingsServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + AutofeedSettingsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + AutofeedSettingsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + AutofeedSettingsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + AutofeedSettingsServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + AutofeedSettingsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + AutofeedSettingsServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + AutofeedSettingsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutofeedSettingsServiceAsyncClient: The constructed client. 
+ """ + return AutofeedSettingsServiceClient.from_service_account_info.__func__(AutofeedSettingsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutofeedSettingsServiceAsyncClient: The constructed client. + """ + return AutofeedSettingsServiceClient.from_service_account_file.__func__(AutofeedSettingsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return AutofeedSettingsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> AutofeedSettingsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AutofeedSettingsServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = AutofeedSettingsServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + AutofeedSettingsServiceTransport, + Callable[..., AutofeedSettingsServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the autofeed settings service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,AutofeedSettingsServiceTransport,Callable[..., AutofeedSettingsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the AutofeedSettingsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = AutofeedSettingsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_autofeed_settings( + self, + request: Optional[ + Union[autofeedsettings.GetAutofeedSettingsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autofeedsettings.AutofeedSettings: + r"""Retrieves the autofeed settings of an account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_get_autofeed_settings(): + # Create a client + client = merchant_accounts_v1beta.AutofeedSettingsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetAutofeedSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_autofeed_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.GetAutofeedSettingsRequest, dict]]): + The request object. Request message for the ``GetAutofeedSettings`` method. + name (:class:`str`): + Required. The resource name of the autofeed settings. 
+ Format: ``accounts/{account}/autofeedSettings`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.AutofeedSettings: + Collection of information related to the + [autofeed](https://support.google.com/merchants/answer/7538732) + settings. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, autofeedsettings.GetAutofeedSettingsRequest): + request = autofeedsettings.GetAutofeedSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_autofeed_settings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_autofeed_settings( + self, + request: Optional[ + Union[autofeedsettings.UpdateAutofeedSettingsRequest, dict] + ] = None, + *, + autofeed_settings: Optional[autofeedsettings.AutofeedSettings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autofeedsettings.AutofeedSettings: + r"""Updates the autofeed settings of an account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_update_autofeed_settings(): + # Create a client + client = merchant_accounts_v1beta.AutofeedSettingsServiceAsyncClient() + + # Initialize request argument(s) + autofeed_settings = merchant_accounts_v1beta.AutofeedSettings() + autofeed_settings.enable_products = True + + request = merchant_accounts_v1beta.UpdateAutofeedSettingsRequest( + autofeed_settings=autofeed_settings, + ) + + # Make the request + response = await client.update_autofeed_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.UpdateAutofeedSettingsRequest, dict]]): + The request object. Request message for the ``UpdateAutofeedSettings`` + method. + autofeed_settings (:class:`google.shopping.merchant_accounts_v1beta.types.AutofeedSettings`): + Required. 
The new version of the + autofeed setting. + + This corresponds to the ``autofeed_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. List of fields being + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.AutofeedSettings: + Collection of information related to the + [autofeed](https://support.google.com/merchants/answer/7538732) + settings. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([autofeed_settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, autofeedsettings.UpdateAutofeedSettingsRequest): + request = autofeedsettings.UpdateAutofeedSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if autofeed_settings is not None: + request.autofeed_settings = autofeed_settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_autofeed_settings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("autofeed_settings.name", request.autofeed_settings.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "AutofeedSettingsServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AutofeedSettingsServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/client.py new file mode 100644 index 000000000000..047661f9ebf1 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/client.py @@ -0,0 +1,931 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import autofeedsettings + +from .transports.base import DEFAULT_CLIENT_INFO, AutofeedSettingsServiceTransport +from .transports.grpc import AutofeedSettingsServiceGrpcTransport +from .transports.grpc_asyncio import AutofeedSettingsServiceGrpcAsyncIOTransport +from .transports.rest import AutofeedSettingsServiceRestTransport + + +class AutofeedSettingsServiceClientMeta(type): + """Metaclass for the AutofeedSettingsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[AutofeedSettingsServiceTransport]] + _transport_registry["grpc"] = AutofeedSettingsServiceGrpcTransport + _transport_registry["grpc_asyncio"] = AutofeedSettingsServiceGrpcAsyncIOTransport + _transport_registry["rest"] = AutofeedSettingsServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[AutofeedSettingsServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AutofeedSettingsServiceClient(metaclass=AutofeedSettingsServiceClientMeta): + """Service to support + `autofeed `__ + setting. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. 
Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "merchantapi.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutofeedSettingsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutofeedSettingsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AutofeedSettingsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AutofeedSettingsServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def autofeed_settings_path( + account: str, + ) -> str: + """Returns a fully-qualified autofeed_settings string.""" + return "accounts/{account}/autofeedSettings".format( + account=account, + ) + + @staticmethod + def parse_autofeed_settings_path(path: str) -> Dict[str, str]: + """Parses a autofeed_settings path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)/autofeedSettings$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + 
) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. 
            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
            is not any of ["auto", "never", "always"].
        """
        use_client_cert = os.getenv(
            "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
        ).lower()
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
        # Fail fast on malformed env vars rather than silently falling back
        # to a default the user did not ask for.
        if use_client_cert not in ("true", "false"):
            raise ValueError(
                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
            )
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError(
                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
            )
        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env

    @staticmethod
    def _get_client_cert_source(provided_cert_source, use_cert_flag):
        """Return the client cert source to be used by the client.

        Args:
            provided_cert_source (bytes): The client certificate source provided.
            use_cert_flag (bool): A flag indicating whether to use the client certificate.

        Returns:
            bytes or None: The client cert source to be used by the client.
        """
        # NOTE(review): despite the generated docstring saying ``bytes``,
        # __init__ passes ``client_options.client_cert_source`` here, which is
        # presumably a callable returning (cert, key) — confirm against
        # google.api_core.client_options.
        client_cert_source = None
        if use_cert_flag:
            if provided_cert_source:
                client_cert_source = provided_cert_source
            elif mtls.has_default_client_cert_source():
                # Fall back to the application-default mTLS certificate when
                # the caller did not supply one explicitly.
                client_cert_source = mtls.default_client_cert_source()
        return client_cert_source

    @staticmethod
    def _get_api_endpoint(
        api_override, client_cert_source, universe_domain, use_mtls_endpoint
    ):
        """Return the API endpoint used by the client.

        Args:
            api_override (str): The API endpoint override. If specified, this is always
                the return value of this function and the other arguments are not used.
            client_cert_source (bytes): The client certificate source used by the client.
            universe_domain (str): The universe domain used by the client.
            use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters.
                Possible values are "always", "auto", or "never".

        Returns:
            str: The API endpoint to be used by the client.
        """
        if api_override is not None:
            # An explicit override always wins, regardless of mTLS settings.
            api_endpoint = api_override
        elif use_mtls_endpoint == "always" or (
            use_mtls_endpoint == "auto" and client_cert_source
        ):
            _default_universe = AutofeedSettingsServiceClient._DEFAULT_UNIVERSE
            if universe_domain != _default_universe:
                # mTLS endpoints exist only for the default (googleapis.com)
                # universe; any other universe cannot use them.
                raise MutualTLSChannelError(
                    f"mTLS is not supported in any universe other than {_default_universe}."
                )
            api_endpoint = AutofeedSettingsServiceClient.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = (
                AutofeedSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(
                    UNIVERSE_DOMAIN=universe_domain
                )
            )
        return api_endpoint

    @staticmethod
    def _get_universe_domain(
        client_universe_domain: Optional[str], universe_domain_env: Optional[str]
    ) -> str:
        """Return the universe domain used by the client.

        Args:
            client_universe_domain (Optional[str]): The universe domain configured via the client options.
            universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.

        Returns:
            str: The universe domain to be used by the client.

        Raises:
            ValueError: If the universe domain is an empty string.
        """
        # Precedence: explicit client option > environment variable > default.
        universe_domain = AutofeedSettingsServiceClient._DEFAULT_UNIVERSE
        if client_universe_domain is not None:
            universe_domain = client_universe_domain
        elif universe_domain_env is not None:
            universe_domain = universe_domain_env
        if len(universe_domain.strip()) == 0:
            raise ValueError("Universe Domain cannot be an empty string.")
        return universe_domain

    @staticmethod
    def _compare_universes(
        client_universe: str, credentials: ga_credentials.Credentials
    ) -> bool:
        """Returns True iff the universe domains used by the client and credentials match.

        Args:
            client_universe (str): The universe domain configured via the client options.
            credentials (ga_credentials.Credentials): The credentials being used in the client.

        Returns:
            bool: True iff client_universe matches the universe in credentials.

        Raises:
            ValueError: when client_universe does not match the universe in credentials.
        """

        default_universe = AutofeedSettingsServiceClient._DEFAULT_UNIVERSE
        # Credentials objects that predate universe-domain support have no
        # ``universe_domain`` attribute; treat them as the default universe.
        credentials_universe = getattr(credentials, "universe_domain", default_universe)

        if client_universe != credentials_universe:
            raise ValueError(
                "The configured universe domain "
                f"({client_universe}) does not match the universe domain "
                f"found in the credentials ({credentials_universe}). "
                "If you haven't configured the universe domain explicitly, "
                f"`{default_universe}` is the default."
            )
        return True

    def _validate_universe_domain(self):
        """Validates client's and credentials' universe domains are consistent.

        Returns:
            bool: True iff the configured universe domain is valid.

        Raises:
            ValueError: If the configured universe domain is not valid.
        """
        # Cache a positive result so the (potentially raising) comparison runs
        # at most once per client instance; subsequent RPCs short-circuit.
        self._is_universe_domain_valid = (
            self._is_universe_domain_valid
            or AutofeedSettingsServiceClient._compare_universes(
                self.universe_domain, self.transport._credentials
            )
        )
        return self._is_universe_domain_valid

    @property
    def api_endpoint(self):
        """Return the API endpoint used by the client instance.

        Returns:
            str: The API endpoint used by the client instance.
        """
        return self._api_endpoint

    @property
    def universe_domain(self) -> str:
        """Return the universe domain used by the client instance.

        Returns:
            str: The universe domain used by the client instance.
        """
        return self._universe_domain

    def __init__(
        self,
        *,
        credentials: Optional[ga_credentials.Credentials] = None,
        transport: Optional[
            Union[
                str,
                AutofeedSettingsServiceTransport,
                Callable[..., AutofeedSettingsServiceTransport],
            ]
        ] = None,
        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiates the autofeed settings service client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Optional[Union[str,AutofeedSettingsServiceTransport,Callable[..., AutofeedSettingsServiceTransport]]]):
                The transport to use, or a Callable that constructs and returns a new transport.
                If a Callable is given, it will be called with the same set of initialization
                arguments as used in the AutofeedSettingsServiceTransport constructor.
                If set to None, a transport is chosen automatically.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
                Custom options for the client.

                1. The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client when ``transport`` is
                not explicitly provided. Only if this property is not set and
                ``transport`` was not explicitly provided, the endpoint is
                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
                variable, which have one of the following values:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto-switch to the
                default mTLS endpoint if client certificate is present; this is
                the default value).

                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide a client certificate for mTLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.

                3. The ``universe_domain`` property can be used to override the
                default "googleapis.com" universe. Note that the ``api_endpoint``
                property still takes precedence; and ``universe_domain`` is
                currently not supported for mTLS.

            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # Normalize client_options: accept a dict, None, or a ClientOptions.
        self._client_options = client_options
        if isinstance(self._client_options, dict):
            self._client_options = client_options_lib.from_dict(self._client_options)
        if self._client_options is None:
            self._client_options = client_options_lib.ClientOptions()
        self._client_options = cast(
            client_options_lib.ClientOptions, self._client_options
        )

        universe_domain_opt = getattr(self._client_options, "universe_domain", None)

        (
            self._use_client_cert,
            self._use_mtls_endpoint,
            self._universe_domain_env,
        ) = AutofeedSettingsServiceClient._read_environment_variables()
        self._client_cert_source = (
            AutofeedSettingsServiceClient._get_client_cert_source(
                self._client_options.client_cert_source, self._use_client_cert
            )
        )
        self._universe_domain = AutofeedSettingsServiceClient._get_universe_domain(
            universe_domain_opt, self._universe_domain_env
        )
        self._api_endpoint = None  # updated below, depending on `transport`

        # Initialize the universe domain validation.
        self._is_universe_domain_valid = False

        api_key_value = getattr(self._client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError(
                "client_options.api_key and credentials are mutually exclusive"
            )

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        transport_provided = isinstance(transport, AutofeedSettingsServiceTransport)
        if transport_provided:
            # transport is a AutofeedSettingsServiceTransport instance.
            # A pre-built transport already carries credentials/scopes, so
            # passing them here as well would be ambiguous — reject.
            if credentials or self._client_options.credentials_file or api_key_value:
                raise ValueError(
                    "When providing a transport instance, "
                    "provide its credentials directly."
                )
            if self._client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = cast(AutofeedSettingsServiceTransport, transport)
            self._api_endpoint = self._transport.host

        # Endpoint resolution order: host of a provided transport, else the
        # endpoint derived from client options / mTLS / universe domain.
        self._api_endpoint = (
            self._api_endpoint
            or AutofeedSettingsServiceClient._get_api_endpoint(
                self._client_options.api_endpoint,
                self._client_cert_source,
                self._universe_domain,
                self._use_mtls_endpoint,
            )
        )

        if not transport_provided:
            # Deferred import: private helper of google-auth, only needed on
            # this path and only if it exists in the installed version.
            import google.auth._default  # type: ignore

            if api_key_value and hasattr(
                google.auth._default, "get_api_key_credentials"
            ):
                credentials = google.auth._default.get_api_key_credentials(
                    api_key_value
                )

            transport_init: Union[
                Type[AutofeedSettingsServiceTransport],
                Callable[..., AutofeedSettingsServiceTransport],
            ] = (
                AutofeedSettingsServiceClient.get_transport_class(transport)
                if isinstance(transport, str) or transport is None
                else cast(Callable[..., AutofeedSettingsServiceTransport], transport)
            )
            # initialize with the provided callable or the passed in class
            self._transport = transport_init(
                credentials=credentials,
                credentials_file=self._client_options.credentials_file,
                host=self._api_endpoint,
                scopes=self._client_options.scopes,
                client_cert_source_for_mtls=self._client_cert_source,
                quota_project_id=self._client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=self._client_options.api_audience,
            )

    def get_autofeed_settings(
        self,
        request: Optional[
            Union[autofeedsettings.GetAutofeedSettingsRequest, dict]
        ] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> autofeedsettings.AutofeedSettings:
        r"""Retrieves the autofeed settings of an account.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.shopping import merchant_accounts_v1beta

            def sample_get_autofeed_settings():
                # Create a client
                client = merchant_accounts_v1beta.AutofeedSettingsServiceClient()

                # Initialize request argument(s)
                request = merchant_accounts_v1beta.GetAutofeedSettingsRequest(
                    name="name_value",
                )

                # Make the request
                response = client.get_autofeed_settings(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.shopping.merchant_accounts_v1beta.types.GetAutofeedSettingsRequest, dict]):
                The request object. Request message for the ``GetAutofeedSettings`` method.
            name (str):
                Required. The resource name of the autofeed settings.
                Format: ``accounts/{account}/autofeedSettings``

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.shopping.merchant_accounts_v1beta.types.AutofeedSettings:
                Collection of information related to the
                [autofeed](https://support.google.com/merchants/answer/7538732)
                settings.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, autofeedsettings.GetAutofeedSettingsRequest):
            request = autofeedsettings.GetAutofeedSettingsRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if name is not None:
                request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get_autofeed_settings]

        # Certain fields should be provided within the metadata header;
        # add these here.  The routing header tells the backend which
        # resource the request targets.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def update_autofeed_settings(
        self,
        request: Optional[
            Union[autofeedsettings.UpdateAutofeedSettingsRequest, dict]
        ] = None,
        *,
        autofeed_settings: Optional[autofeedsettings.AutofeedSettings] = None,
        update_mask: Optional[field_mask_pb2.FieldMask] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> autofeedsettings.AutofeedSettings:
        r"""Updates the autofeed settings of an account.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.shopping import merchant_accounts_v1beta

            def sample_update_autofeed_settings():
                # Create a client
                client = merchant_accounts_v1beta.AutofeedSettingsServiceClient()

                # Initialize request argument(s)
                autofeed_settings = merchant_accounts_v1beta.AutofeedSettings()
                autofeed_settings.enable_products = True

                request = merchant_accounts_v1beta.UpdateAutofeedSettingsRequest(
                    autofeed_settings=autofeed_settings,
                )

                # Make the request
                response = client.update_autofeed_settings(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.shopping.merchant_accounts_v1beta.types.UpdateAutofeedSettingsRequest, dict]):
                The request object. Request message for the ``UpdateAutofeedSettings``
                method.
            autofeed_settings (google.shopping.merchant_accounts_v1beta.types.AutofeedSettings):
                Required. The new version of the
                autofeed setting.

                This corresponds to the ``autofeed_settings`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            update_mask (google.protobuf.field_mask_pb2.FieldMask):
                Required. List of fields being
                updated.

                This corresponds to the ``update_mask`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.shopping.merchant_accounts_v1beta.types.AutofeedSettings:
                Collection of information related to the
                [autofeed](https://support.google.com/merchants/answer/7538732)
                settings.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([autofeed_settings, update_mask])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, autofeedsettings.UpdateAutofeedSettingsRequest):
            request = autofeedsettings.UpdateAutofeedSettingsRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if autofeed_settings is not None:
                request.autofeed_settings = autofeed_settings
            if update_mask is not None:
                request.update_mask = update_mask

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.update_autofeed_settings]

        # Certain fields should be provided within the metadata header;
        # add these here.  The routing key is the nested resource name of the
        # settings being updated.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("autofeed_settings.name", request.autofeed_settings.name),)
            ),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def __enter__(self) -> "AutofeedSettingsServiceClient":
        return self

    def __exit__(self, type, value, traceback):
        """Releases underlying transport's resources.

        .. warning::
            ONLY use as a context manager if the transport is NOT shared
            with other clients! Exiting the with block will CLOSE the transport
            and may cause errors in other clients!
        """
        self.transport.close()


DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__
)


__all__ = ("AutofeedSettingsServiceClient",)
diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/__init__.py
new file mode 100644
index 000000000000..e9bea840fd96
--- /dev/null
+++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/__init__.py
@@ -0,0 +1,41 @@
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import AutofeedSettingsServiceTransport
from .grpc import AutofeedSettingsServiceGrpcTransport
from .grpc_asyncio import AutofeedSettingsServiceGrpcAsyncIOTransport
from .rest import (
    AutofeedSettingsServiceRestInterceptor,
    AutofeedSettingsServiceRestTransport,
)

# Compile a registry of transports.
# Maps the string names accepted by the client ("grpc", "grpc_asyncio",
# "rest") to the concrete transport classes.
_transport_registry = (
    OrderedDict()
)  # type: Dict[str, Type[AutofeedSettingsServiceTransport]]
_transport_registry["grpc"] = AutofeedSettingsServiceGrpcTransport
_transport_registry["grpc_asyncio"] = AutofeedSettingsServiceGrpcAsyncIOTransport
_transport_registry["rest"] = AutofeedSettingsServiceRestTransport

__all__ = (
    "AutofeedSettingsServiceTransport",
    "AutofeedSettingsServiceGrpcTransport",
    "AutofeedSettingsServiceGrpcAsyncIOTransport",
    "AutofeedSettingsServiceRestTransport",
    "AutofeedSettingsServiceRestInterceptor",
)
diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/base.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/base.py
new file mode 100644
index 000000000000..edee0c38537d
--- /dev/null
+++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/base.py
@@ -0,0 +1,181 @@
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the
"License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union

import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
import google.auth  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.shopping.merchant_accounts_v1beta import gapic_version as package_version
from google.shopping.merchant_accounts_v1beta.types import autofeedsettings

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__
)


class AutofeedSettingsServiceTransport(abc.ABC):
    """Abstract transport class for AutofeedSettingsService.

    Concrete subclasses (gRPC, gRPC-asyncio, REST) implement the RPC
    properties declared at the bottom of this class; this base handles the
    credential resolution, scope handling and host normalization shared by
    all of them.
    """

    # Default OAuth scopes requested when the caller supplies none.
    AUTH_SCOPES = ("https://www.googleapis.com/auth/content",)

    DEFAULT_HOST: str = "merchantapi.googleapis.com"

    def __init__(
        self,
        *,
        host: str = DEFAULT_HOST,
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
        **kwargs,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to (default: 'merchantapi.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
        """

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes
        # Subclasses (e.g. a channel-based transport) may set
        # _ignore_credentials before calling super().__init__().
        if not hasattr(self, "_ignore_credentials"):
            self._ignore_credentials: bool = False

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive"
            )

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
            )
        elif credentials is None and not self._ignore_credentials:
            credentials, _ = google.auth.default(
                **scopes_kwargs, quota_project_id=quota_project_id
            )
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(
                    api_audience if api_audience else host
                )

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if (
            always_use_jwt_access
            and isinstance(credentials, service_account.Credentials)
            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
        ):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ":" not in host:
            host += ":443"
        self._host = host

    @property
    def host(self):
        # Host including port, as normalized in __init__.
        return self._host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods.
        # Wrapping attaches retry/timeout defaults and user-agent metadata;
        # both RPCs here use no default timeout.
        self._wrapped_methods = {
            self.get_autofeed_settings: gapic_v1.method.wrap_method(
                self.get_autofeed_settings,
                default_timeout=None,
                client_info=client_info,
            ),
            self.update_autofeed_settings: gapic_v1.method.wrap_method(
                self.update_autofeed_settings,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
            Only call this method if the transport is NOT shared
            with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def get_autofeed_settings(
        self,
    ) -> Callable[
        [autofeedsettings.GetAutofeedSettingsRequest],
        Union[
            autofeedsettings.AutofeedSettings,
            Awaitable[autofeedsettings.AutofeedSettings],
        ],
    ]:
        # Abstract RPC hook; subclasses return a sync or async callable.
        raise NotImplementedError()

    @property
    def update_autofeed_settings(
        self,
    ) -> Callable[
        [autofeedsettings.UpdateAutofeedSettingsRequest],
        Union[
            autofeedsettings.AutofeedSettings,
            Awaitable[autofeedsettings.AutofeedSettings],
        ],
    ]:
        # Abstract RPC hook; subclasses return a sync or async callable.
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        # Short transport identifier, e.g. "grpc" or "rest", per subclass.
        raise NotImplementedError()


__all__ = ("AutofeedSettingsServiceTransport",)
diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/grpc.py
new file mode 100644
index 000000000000..dbe95388a05c
--- /dev/null
+++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/grpc.py
@@ -0,0 +1,304 @@
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import autofeedsettings + +from .base import DEFAULT_CLIENT_INFO, AutofeedSettingsServiceTransport + + +class AutofeedSettingsServiceGrpcTransport(AutofeedSettingsServiceTransport): + """gRPC backend transport for AutofeedSettingsService. + + Service to support + `autofeed `__ + setting. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_autofeed_settings( + self, + ) -> Callable[ + [autofeedsettings.GetAutofeedSettingsRequest], autofeedsettings.AutofeedSettings + ]: + r"""Return a callable for the get autofeed settings method over gRPC. + + Retrieves the autofeed settings of an account. + + Returns: + Callable[[~.GetAutofeedSettingsRequest], + ~.AutofeedSettings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_autofeed_settings" not in self._stubs: + self._stubs["get_autofeed_settings"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AutofeedSettingsService/GetAutofeedSettings", + request_serializer=autofeedsettings.GetAutofeedSettingsRequest.serialize, + response_deserializer=autofeedsettings.AutofeedSettings.deserialize, + ) + return self._stubs["get_autofeed_settings"] + + @property + def update_autofeed_settings( + self, + ) -> Callable[ + [autofeedsettings.UpdateAutofeedSettingsRequest], + autofeedsettings.AutofeedSettings, + ]: + r"""Return a callable for the update autofeed settings method over gRPC. + + Updates the autofeed settings of an account. + + Returns: + Callable[[~.UpdateAutofeedSettingsRequest], + ~.AutofeedSettings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_autofeed_settings" not in self._stubs: + self._stubs["update_autofeed_settings"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AutofeedSettingsService/UpdateAutofeedSettings", + request_serializer=autofeedsettings.UpdateAutofeedSettingsRequest.serialize, + response_deserializer=autofeedsettings.AutofeedSettings.deserialize, + ) + return self._stubs["update_autofeed_settings"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("AutofeedSettingsServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..99364a401b73 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/grpc_asyncio.py @@ -0,0 +1,320 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import autofeedsettings + +from .base import DEFAULT_CLIENT_INFO, AutofeedSettingsServiceTransport +from .grpc import AutofeedSettingsServiceGrpcTransport + + +class AutofeedSettingsServiceGrpcAsyncIOTransport(AutofeedSettingsServiceTransport): + """gRPC AsyncIO backend transport for AutofeedSettingsService. + + Service to support + `autofeed `__ + setting. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def get_autofeed_settings( + self, + ) -> Callable[ + [autofeedsettings.GetAutofeedSettingsRequest], + Awaitable[autofeedsettings.AutofeedSettings], + ]: + r"""Return a callable for the get autofeed settings method over gRPC. + + Retrieves the autofeed settings of an account. + + Returns: + Callable[[~.GetAutofeedSettingsRequest], + Awaitable[~.AutofeedSettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_autofeed_settings" not in self._stubs: + self._stubs["get_autofeed_settings"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AutofeedSettingsService/GetAutofeedSettings", + request_serializer=autofeedsettings.GetAutofeedSettingsRequest.serialize, + response_deserializer=autofeedsettings.AutofeedSettings.deserialize, + ) + return self._stubs["get_autofeed_settings"] + + @property + def update_autofeed_settings( + self, + ) -> Callable[ + [autofeedsettings.UpdateAutofeedSettingsRequest], + Awaitable[autofeedsettings.AutofeedSettings], + ]: + r"""Return a callable for the update autofeed settings method over gRPC. + + Updates the autofeed settings of an account. + + Returns: + Callable[[~.UpdateAutofeedSettingsRequest], + Awaitable[~.AutofeedSettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_autofeed_settings" not in self._stubs: + self._stubs["update_autofeed_settings"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AutofeedSettingsService/UpdateAutofeedSettings", + request_serializer=autofeedsettings.UpdateAutofeedSettingsRequest.serialize, + response_deserializer=autofeedsettings.AutofeedSettings.deserialize, + ) + return self._stubs["update_autofeed_settings"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_autofeed_settings: gapic_v1.method_async.wrap_method( + self.get_autofeed_settings, + default_timeout=None, + client_info=client_info, + ), + self.update_autofeed_settings: gapic_v1.method_async.wrap_method( + self.update_autofeed_settings, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("AutofeedSettingsServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/rest.py new file mode 100644 index 000000000000..57aff5878d5f --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/rest.py @@ -0,0 +1,454 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.shopping.merchant_accounts_v1beta.types import autofeedsettings + +from .base import AutofeedSettingsServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AutofeedSettingsServiceRestInterceptor: + """Interceptor for AutofeedSettingsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AutofeedSettingsServiceRestTransport. + + .. code-block:: python + class MyCustomAutofeedSettingsServiceInterceptor(AutofeedSettingsServiceRestInterceptor): + def pre_get_autofeed_settings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_autofeed_settings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_autofeed_settings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_autofeed_settings(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AutofeedSettingsServiceRestTransport(interceptor=MyCustomAutofeedSettingsServiceInterceptor()) + client = AutofeedSettingsServiceClient(transport=transport) + + + """ + + def pre_get_autofeed_settings( + self, + request: autofeedsettings.GetAutofeedSettingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[autofeedsettings.GetAutofeedSettingsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_autofeed_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the AutofeedSettingsService server. + """ + return request, metadata + + def post_get_autofeed_settings( + self, response: autofeedsettings.AutofeedSettings + ) -> autofeedsettings.AutofeedSettings: + """Post-rpc interceptor for get_autofeed_settings + + Override in a subclass to manipulate the response + after it is returned by the AutofeedSettingsService server but before + it is returned to user code. 
+ """ + return response + + def pre_update_autofeed_settings( + self, + request: autofeedsettings.UpdateAutofeedSettingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + autofeedsettings.UpdateAutofeedSettingsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_autofeed_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the AutofeedSettingsService server. + """ + return request, metadata + + def post_update_autofeed_settings( + self, response: autofeedsettings.AutofeedSettings + ) -> autofeedsettings.AutofeedSettings: + """Post-rpc interceptor for update_autofeed_settings + + Override in a subclass to manipulate the response + after it is returned by the AutofeedSettingsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AutofeedSettingsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AutofeedSettingsServiceRestInterceptor + + +class AutofeedSettingsServiceRestTransport(AutofeedSettingsServiceTransport): + """REST backend transport for AutofeedSettingsService. + + Service to support + `autofeed `__ + setting. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[AutofeedSettingsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or AutofeedSettingsServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetAutofeedSettings(AutofeedSettingsServiceRestStub): + def __hash__(self): + return hash("GetAutofeedSettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: autofeedsettings.GetAutofeedSettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autofeedsettings.AutofeedSettings: + r"""Call the get autofeed settings method over HTTP. 
+ + Args: + request (~.autofeedsettings.GetAutofeedSettingsRequest): + The request object. Request message for the ``GetAutofeedSettings`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.autofeedsettings.AutofeedSettings: + Collection of information related to the + `autofeed `__ + settings. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/accounts/v1beta/{name=accounts/*/autofeedSettings}", + }, + ] + request, metadata = self._interceptor.pre_get_autofeed_settings( + request, metadata + ) + pb_request = autofeedsettings.GetAutofeedSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = autofeedsettings.AutofeedSettings() + pb_resp = autofeedsettings.AutofeedSettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_autofeed_settings(resp) + return resp + + class _UpdateAutofeedSettings(AutofeedSettingsServiceRestStub): + def __hash__(self): + return hash("UpdateAutofeedSettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: autofeedsettings.UpdateAutofeedSettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autofeedsettings.AutofeedSettings: + r"""Call the update autofeed settings method over HTTP. + + Args: + request (~.autofeedsettings.UpdateAutofeedSettingsRequest): + The request object. Request message for the ``UpdateAutofeedSettings`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.autofeedsettings.AutofeedSettings: + Collection of information related to the + `autofeed `__ + settings. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/accounts/v1beta/{autofeed_settings.name=accounts/*/autofeedSettings}", + "body": "autofeed_settings", + }, + ] + request, metadata = self._interceptor.pre_update_autofeed_settings( + request, metadata + ) + pb_request = autofeedsettings.UpdateAutofeedSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = autofeedsettings.AutofeedSettings() + pb_resp = autofeedsettings.AutofeedSettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_autofeed_settings(resp) + return resp + + @property + def get_autofeed_settings( + self, + ) -> Callable[ + [autofeedsettings.GetAutofeedSettingsRequest], autofeedsettings.AutofeedSettings + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetAutofeedSettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_autofeed_settings( + self, + ) -> Callable[ + [autofeedsettings.UpdateAutofeedSettingsRequest], + autofeedsettings.AutofeedSettings, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateAutofeedSettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("AutofeedSettingsServiceRestTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/async_client.py index e01a3b2bcc9a..570799bb8e27 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/async_client.py @@ -287,7 +287,7 @@ async def get_email_preferences( r"""Returns the email preferences for a Merchant Center account user. - Use the name=accounts/*/users/me/emailPreferences alias to get + Use the ``name=accounts/*/users/me/emailPreferences`` alias to get preferences for the authenticated user. .. code-block:: python @@ -411,7 +411,7 @@ async def update_email_preferences( It is invalid for updates to specify an UNCONFIRMED opt-in status value. - Use the name=accounts/*/users/me/emailPreferences alias to + Use the ``name=accounts/*/users/me/emailPreferences`` alias to update preferences for the authenticated user. .. 
code-block:: python diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/client.py index e2d8285b4900..d1a194f82f4b 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/client.py @@ -702,7 +702,7 @@ def get_email_preferences( r"""Returns the email preferences for a Merchant Center account user. - Use the name=accounts/*/users/me/emailPreferences alias to get + Use the ``name=accounts/*/users/me/emailPreferences`` alias to get preferences for the authenticated user. .. code-block:: python @@ -823,7 +823,7 @@ def update_email_preferences( It is invalid for updates to specify an UNCONFIRMED opt-in status value. - Use the name=accounts/*/users/me/emailPreferences alias to + Use the ``name=accounts/*/users/me/emailPreferences`` alias to update preferences for the authenticated user. .. 
code-block:: python diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/async_client.py index 14f3f87a6b36..edfee0c82f58 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/async_client.py @@ -55,10 +55,8 @@ class OnlineReturnPolicyServiceAsyncClient: """The service facilitates the management of a merchant's remorse return policy configuration, encompassing return policies for both - ads and free listings - - programs. This API defines the following resource model: - -------------------------------------------------------- + ads and free listings programs. 
This API defines the following resource model: + ----------------------------------------------------------------------------- [OnlineReturnPolicy][google.shopping.merchant.accounts.v1.OnlineReturnPolicy] """ diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/client.py index 5159022c0212..690860b09d70 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/client.py @@ -99,10 +99,8 @@ def get_transport_class( class OnlineReturnPolicyServiceClient(metaclass=OnlineReturnPolicyServiceClientMeta): """The service facilitates the management of a merchant's remorse return policy configuration, encompassing return policies for both - ads and free listings - - programs. This API defines the following resource model: - -------------------------------------------------------- + ads and free listings programs. 
This API defines the following resource model: + ----------------------------------------------------------------------------- [OnlineReturnPolicy][google.shopping.merchant.accounts.v1.OnlineReturnPolicy] """ diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/async_client.py index aab6ce594de6..6565278a30e3 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/async_client.py @@ -327,7 +327,7 @@ async def sample_get_shipping_settings(): Returns: google.shopping.merchant_accounts_v1beta.types.ShippingSettings: The merchant account's [shipping - setting]((\ https://support.google.com/merchants/answer/6069284). + setting](\ https://support.google.com/merchants/answer/6069284). """ # Create or coerce a protobuf request object. @@ -433,7 +433,7 @@ async def sample_insert_shipping_settings(): Returns: google.shopping.merchant_accounts_v1beta.types.ShippingSettings: The merchant account's [shipping - setting]((\ https://support.google.com/merchants/answer/6069284). + setting](\ https://support.google.com/merchants/answer/6069284). """ # Create or coerce a protobuf request object. 
diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/client.py index a54e3edc102b..131a35b84f53 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/client.py @@ -738,7 +738,7 @@ def sample_get_shipping_settings(): Returns: google.shopping.merchant_accounts_v1beta.types.ShippingSettings: The merchant account's [shipping - setting]((\ https://support.google.com/merchants/answer/6069284). + setting](\ https://support.google.com/merchants/answer/6069284). """ # Create or coerce a protobuf request object. @@ -841,7 +841,7 @@ def sample_insert_shipping_settings(): Returns: google.shopping.merchant_accounts_v1beta.types.ShippingSettings: The merchant account's [shipping - setting]((\ https://support.google.com/merchants/answer/6069284). + setting](\ https://support.google.com/merchants/answer/6069284). """ # Create or coerce a protobuf request object. 
diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/rest.py index f93d7be50bf0..7e2a8e043d3a 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/rest.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/rest.py @@ -266,8 +266,8 @@ def __call__( Returns: ~.shippingsettings.ShippingSettings: - The merchant account's [shipping - setting]((https://support.google.com/merchants/answer/6069284). + The merchant account's `shipping + setting `__. """ @@ -356,8 +356,8 @@ def __call__( Returns: ~.shippingsettings.ShippingSettings: - The merchant account's [shipping - setting]((https://support.google.com/merchants/answer/6069284). + The merchant account's `shipping + setting `__. """ diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/async_client.py index c32d92d51405..128ef18fec98 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/async_client.py @@ -339,6 +339,8 @@ async def sample_get_terms_of_service_agreement_state(): Required. The resource name of the terms of service version. 
Format: ``accounts/{account}/termsOfServiceAgreementState/{identifier}`` + The identifier format is: + ``{TermsOfServiceKind}-{country}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/client.py index 98cffc24a88e..bcce71b62393 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/client.py @@ -786,6 +786,8 @@ def sample_get_terms_of_service_agreement_state(): Required. The resource name of the terms of service version. 
Format: ``accounts/{account}/termsOfServiceAgreementState/{identifier}`` + The identifier format is: + ``{TermsOfServiceKind}-{country}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/async_client.py index 841d23664e98..601dd62bcbee 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/async_client.py @@ -402,6 +402,8 @@ async def sample_retrieve_latest_terms_of_service(): # Initialize request argument(s) request = merchant_accounts_v1beta.RetrieveLatestTermsOfServiceRequest( + region_code="region_code_value", + kind="MERCHANT_CENTER", ) # Make the request diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/client.py index 8b47907a1926..ccf312d7ddc9 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/client.py @@ -825,6 +825,8 @@ def sample_retrieve_latest_terms_of_service(): # Initialize request argument(s) request = merchant_accounts_v1beta.RetrieveLatestTermsOfServiceRequest( + region_code="region_code_value", + kind="MERCHANT_CENTER", ) # Make the request diff --git 
a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/rest.py index c2a44909985b..d6840f71ab24 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/rest.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/rest.py @@ -417,6 +417,19 @@ class _RetrieveLatestTermsOfService(TermsOfServiceServiceRestStub): def __hash__(self): return hash("RetrieveLatestTermsOfService") + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "regionCode": "", + "kind": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + def __call__( self, request: termsofservice.RetrieveLatestTermsOfServiceRequest, @@ -465,6 +478,7 @@ def __call__( use_integers_for_enums=True, ) ) + query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/__init__.py index fdd477be2f6a..90a54b07fd70 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/__init__.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/__init__.py @@ -37,6 +37,12 @@ ListSubAccountsResponse, UpdateAccountRequest, ) +from .accountservices import AccountAggregation +from .autofeedsettings import ( + AutofeedSettings, + GetAutofeedSettingsRequest, + 
UpdateAutofeedSettingsRequest, +) from .businessidentity import ( BusinessIdentity, GetBusinessIdentityRequest, @@ -151,6 +157,10 @@ "ListSubAccountsRequest", "ListSubAccountsResponse", "UpdateAccountRequest", + "AccountAggregation", + "AutofeedSettings", + "GetAutofeedSettingsRequest", + "UpdateAutofeedSettingsRequest", "BusinessIdentity", "GetBusinessIdentityRequest", "UpdateBusinessIdentityRequest", diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accountissue.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accountissue.py index 91e837cf849b..4cf1f6bc9f24 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accountissue.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accountissue.py @@ -18,7 +18,6 @@ from typing import MutableMapping, MutableSequence from google.shopping.type.types import types -from google.type import datetime_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -179,7 +178,7 @@ class ListAccountIssuesRequest(proto.Message): `BCP-47 `__, such as ``en-US`` or ``sr-Latn``. If not value is provided, ``en-US`` will be used. - time_zone (google.type.datetime_pb2.TimeZone): + time_zone (str): Optional. The `IANA `__ timezone used to localize times in human-readable fields. For example 'America/Los_Angeles'. 
If not set, @@ -202,10 +201,9 @@ class ListAccountIssuesRequest(proto.Message): proto.STRING, number=4, ) - time_zone: datetime_pb2.TimeZone = proto.Field( - proto.MESSAGE, + time_zone: str = proto.Field( + proto.STRING, number=5, - message=datetime_pb2.TimeZone, ) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accounts.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accounts.py index a7b454e96fa1..fcbf154b7b41 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accounts.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accounts.py @@ -17,12 +17,11 @@ from typing import MutableMapping, MutableSequence -from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.type import datetime_pb2 # type: ignore import proto # type: ignore -from google.shopping.merchant_accounts_v1beta.types import user +from google.shopping.merchant_accounts_v1beta.types import accountservices, user __protobuf__ = proto.module( package="google.shopping.merchant.accounts.v1beta", @@ -136,10 +135,14 @@ class CreateAndConfigureAccountRequest(proto.Message): This field is a member of `oneof`_ ``_accept_terms_of_service``. service (MutableSequence[google.shopping.merchant_accounts_v1beta.types.CreateAndConfigureAccountRequest.AddAccountService]): - Optional. If specified, an account service - between the account to be created and the - provider account is initialized as part of the - creation. + Required. An account service between the account to be + created and the provider account is initialized as part of + the creation. At least one such service needs to be + provided. Currently exactly one of these needs to be + ``account_aggregation``, which means you can only create sub + accounts, not standalone account through this method. 
+ Additional ``account_management`` or ``product_management`` + services may be provided. """ class AcceptTermsOfService(proto.Message): @@ -147,8 +150,11 @@ class AcceptTermsOfService(proto.Message): Attributes: name (str): - Required. The resource name of the terms of - service version. + Required. The resource name of the terms of service version + in the format ``termsOfService/{version}``. To retrieve the + latest version, use the + `termsOfService.retrieveLatest `__ + method. region_code (str): Required. Region code as defined by `CLDR `__. This is either a @@ -173,9 +179,11 @@ class AddAccountService(proto.Message): .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - account_aggregation (google.protobuf.empty_pb2.Empty): - The provider is an aggregator for the - account. + account_aggregation (google.shopping.merchant_accounts_v1beta.types.AccountAggregation): + The provider is an + `aggregator `__ + for the account. Payload for service type Account + Aggregation. This field is a member of `oneof`_ ``service_type``. provider (str): @@ -185,11 +193,11 @@ class AddAccountService(proto.Message): This field is a member of `oneof`_ ``_provider``. """ - account_aggregation: empty_pb2.Empty = proto.Field( + account_aggregation: accountservices.AccountAggregation = proto.Field( proto.MESSAGE, - number=2, + number=103, oneof="service_type", - message=empty_pb2.Empty, + message=accountservices.AccountAggregation, ) provider: str = proto.Field( proto.STRING, @@ -227,12 +235,20 @@ class DeleteAccountRequest(proto.Message): name (str): Required. The name of the account to delete. Format: ``accounts/{account}`` + force (bool): + Optional. If set to ``true``, the account is deleted even if + it provides services to other accounts or has processed + offers. 
""" name: str = proto.Field( proto.STRING, number=1, ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) class UpdateAccountRequest(proto.Message): diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accountservices.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accountservices.py new file mode 100644 index 000000000000..3862f71dfbb4 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accountservices.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "AccountAggregation", + }, +) + + +class AccountAggregation(proto.Message): + r"""``AccountAggregation`` payload.""" + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/autofeedsettings.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/autofeedsettings.py new file mode 100644 index 000000000000..475d7f62b5ee --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/autofeedsettings.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "AutofeedSettings", + "GetAutofeedSettingsRequest", + "UpdateAutofeedSettingsRequest", + }, +) + + +class AutofeedSettings(proto.Message): + r"""Collection of information related to the + `autofeed `__ + settings. + + Attributes: + name (str): + Identifier. 
The resource name of the autofeed settings. + Format: ``accounts/{account}/autofeedSettings``. + enable_products (bool): + Required. Enables or disables product crawling through the + autofeed for the given account. Autofeed accounts must meet + `certain + conditions `__, + which can be checked through the ``eligible`` field. The + account must **not** be a marketplace. When the autofeed is + enabled for the first time, the products usually appear + instantly. When re-enabling, it might take up to 24 hours + for products to appear. + eligible (bool): + Output only. Determines whether merchant is + eligible for being enrolled into an autofeed. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + enable_products: bool = proto.Field( + proto.BOOL, + number=2, + ) + eligible: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class GetAutofeedSettingsRequest(proto.Message): + r"""Request message for the ``GetAutofeedSettings`` method. + + Attributes: + name (str): + Required. The resource name of the autofeed settings. + Format: ``accounts/{account}/autofeedSettings`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateAutofeedSettingsRequest(proto.Message): + r"""Request message for the ``UpdateAutofeedSettings`` method. + + Attributes: + autofeed_settings (google.shopping.merchant_accounts_v1beta.types.AutofeedSettings): + Required. The new version of the autofeed + setting. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. List of fields being updated. 
+ """ + + autofeed_settings: "AutofeedSettings" = proto.Field( + proto.MESSAGE, + number=1, + message="AutofeedSettings", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/businessinfo.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/businessinfo.py index ab8e68e599c3..3c20aa2d7756 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/businessinfo.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/businessinfo.py @@ -65,6 +65,12 @@ class BusinessInfo(proto.Message): business. This field is a member of `oneof`_ ``_customer_service``. + korean_business_registration_number (str): + Optional. The 10-digit `Korean business registration + number `__ + separated with dashes in the format: XXX-XX-XXXXX. + + This field is a member of `oneof`_ ``_korean_business_registration_number``. 
""" name: str = proto.Field( @@ -97,6 +103,11 @@ class BusinessInfo(proto.Message): optional=True, message=customerservice.CustomerService, ) + korean_business_registration_number: str = proto.Field( + proto.STRING, + number=6, + optional=True, + ) class GetBusinessInfoRequest(proto.Message): diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/shippingsettings.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/shippingsettings.py index 2afe56a87508..eacef556bcd1 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/shippingsettings.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/shippingsettings.py @@ -49,8 +49,8 @@ class ShippingSettings(proto.Message): - r"""The merchant account's [shipping - setting]((https://support.google.com/merchants/answer/6069284). + r"""The merchant account's `shipping + setting `__. Attributes: name (str): @@ -65,24 +65,24 @@ class ShippingSettings(proto.Message): etag (str): Required. This field is used for avoid async issue. Make sure shipping setting data - didn't change between get call and insert call. - The user should do following steps: + didn't change between get call and insert call. + The user should do following steps: - 1. Set etag field as empty string for initial + 1. Set etag field as empty string for initial shipping setting creation. - 2. After initial creation, call get method to + 2. After initial creation, call get method to obtain an etag and current shipping setting data before call insert. - 3. Modify to wanted shipping setting + 3. Modify to wanted shipping setting information. - 4. Call insert method with the wanted shipping + 4. Call insert method with the wanted shipping setting information with the etag obtained from step 2. - 5. If shipping setting data changed between step + 5. 
If shipping setting data changed between step 2 and step 4. Insert request will fail because the etag changes every time the shipping setting data changes. User should @@ -709,15 +709,18 @@ class DeliveryTime(proto.Message): This field is a member of `oneof`_ ``_cutoff_time``. min_handling_days (int): - Minimum number of business days spent before - an order is shipped. 0 means same day shipped, 1 - means next day shipped. + Minimum number of business days spent before an order is + shipped. 0 means same day shipped, 1 means next day shipped. + 'min_handling_days' and 'max_handling_days' should be either + set or not set at the same time. This field is a member of `oneof`_ ``_min_handling_days``. max_handling_days (int): Maximum number of business days spent before an order is shipped. 0 means same day shipped, 1 means next day shipped. Must be greater than or equal to ``min_handling_days``. + 'min_handling_days' and 'max_handling_days' should be either + set or not set at the same time. This field is a member of `oneof`_ ``_max_handling_days``. transit_time_table (google.shopping.merchant_accounts_v1beta.types.TransitTable): diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofservice.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofservice.py index b2e6d71a77e1..222139600000 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofservice.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofservice.py @@ -109,13 +109,13 @@ class RetrieveLatestTermsOfServiceRequest(proto.Message): Attributes: region_code (str): - Region code as defined by + Required. Region code as defined by `CLDR `__. This is either a country when the ToS applies specifically to that country or 001 when it applies globally. 
kind (google.shopping.merchant_accounts_v1beta.types.TermsOfServiceKind): - The Kind this terms of service version - applies to. + Required. The Kind this terms of service + version applies to. """ region_code: str = proto.Field( diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofserviceagreementstate.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofserviceagreementstate.py index 15b09f8d665b..d9ffe986c203 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofserviceagreementstate.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofserviceagreementstate.py @@ -66,6 +66,8 @@ class TermsOfServiceAgreementState(proto.Message): Identifier. The resource name of the terms of service version. Format: ``accounts/{account}/termsOfServiceAgreementState/{identifier}`` + The identifier format is: ``{TermsOfServiceKind}-{country}`` + For example, an identifier could be: ``MERCHANT_CENTER-US`` region_code (str): Region code as defined by https://cldr.unicode.org/. This is the country @@ -186,6 +188,7 @@ class GetTermsOfServiceAgreementStateRequest(proto.Message): Required. The resource name of the terms of service version. 
Format: ``accounts/{account}/termsOfServiceAgreementState/{identifier}`` + The identifier format is: ``{TermsOfServiceKind}-{country}`` """ name: str = proto.Field( diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_async.py new file mode 100644 index 000000000000..d207adb015ce --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAutofeedSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AutofeedSettingsService_GetAutofeedSettings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_get_autofeed_settings(): + # Create a client + client = merchant_accounts_v1beta.AutofeedSettingsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetAutofeedSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_autofeed_settings(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_AutofeedSettingsService_GetAutofeedSettings_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_sync.py new file mode 100644 index 000000000000..d0cadcbcce63 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAutofeedSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AutofeedSettingsService_GetAutofeedSettings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_get_autofeed_settings(): + # Create a client + client = merchant_accounts_v1beta.AutofeedSettingsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetAutofeedSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_autofeed_settings(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_AutofeedSettingsService_GetAutofeedSettings_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_async.py new file mode 100644 index 000000000000..68d50c7746e8 --- /dev/null +++ 
b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAutofeedSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AutofeedSettingsService_UpdateAutofeedSettings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_update_autofeed_settings(): + # Create a client + client = merchant_accounts_v1beta.AutofeedSettingsServiceAsyncClient() + + # Initialize request argument(s) + autofeed_settings = merchant_accounts_v1beta.AutofeedSettings() + autofeed_settings.enable_products = True + + request = merchant_accounts_v1beta.UpdateAutofeedSettingsRequest( + autofeed_settings=autofeed_settings, + ) + + # Make the request + response = await client.update_autofeed_settings(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_AutofeedSettingsService_UpdateAutofeedSettings_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_sync.py new file mode 100644 index 000000000000..98f5f24a8c97 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAutofeedSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AutofeedSettingsService_UpdateAutofeedSettings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_update_autofeed_settings(): + # Create a client + client = merchant_accounts_v1beta.AutofeedSettingsServiceClient() + + # Initialize request argument(s) + autofeed_settings = merchant_accounts_v1beta.AutofeedSettings() + autofeed_settings.enable_products = True + + request = merchant_accounts_v1beta.UpdateAutofeedSettingsRequest( + autofeed_settings=autofeed_settings, + ) + + # Make the request + response = client.update_autofeed_settings(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_AutofeedSettingsService_UpdateAutofeedSettings_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_async.py index f41dc0f89b50..918d61da5083 
100644 --- a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_async.py +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_async.py @@ -40,6 +40,8 @@ async def sample_retrieve_latest_terms_of_service(): # Initialize request argument(s) request = merchant_accounts_v1beta.RetrieveLatestTermsOfServiceRequest( + region_code="region_code_value", + kind="MERCHANT_CENTER", ) # Make the request diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_sync.py index c9cc12dbc4eb..afb5673b7735 100644 --- a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_sync.py +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_sync.py @@ -40,6 +40,8 @@ def sample_retrieve_latest_terms_of_service(): # Initialize request argument(s) request = merchant_accounts_v1beta.RetrieveLatestTermsOfServiceRequest( + region_code="region_code_value", + kind="MERCHANT_CENTER", ) # Make the request diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/snippet_metadata_google.shopping.merchant.accounts.v1beta.json b/packages/google-shopping-merchant-accounts/samples/generated_samples/snippet_metadata_google.shopping.merchant.accounts.v1beta.json index a6cc2aef08d1..91ea23ca0671 100644 --- 
a/packages/google-shopping-merchant-accounts/samples/generated_samples/snippet_metadata_google.shopping.merchant.accounts.v1beta.json +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/snippet_metadata_google.shopping.merchant.accounts.v1beta.json @@ -1615,6 +1615,336 @@ ], "title": "merchantapi_v1beta_generated_accounts_service_update_account_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AutofeedSettingsServiceAsyncClient", + "shortName": "AutofeedSettingsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AutofeedSettingsServiceAsyncClient.get_autofeed_settings", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AutofeedSettingsService.GetAutofeedSettings", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AutofeedSettingsService", + "shortName": "AutofeedSettingsService" + }, + "shortName": "GetAutofeedSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetAutofeedSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.AutofeedSettings", + "shortName": "get_autofeed_settings" + }, + "description": "Sample for GetAutofeedSettings", + "file": "merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AutofeedSettingsService_GetAutofeedSettings_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AutofeedSettingsServiceClient", + "shortName": "AutofeedSettingsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AutofeedSettingsServiceClient.get_autofeed_settings", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AutofeedSettingsService.GetAutofeedSettings", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AutofeedSettingsService", + "shortName": "AutofeedSettingsService" + }, + "shortName": "GetAutofeedSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetAutofeedSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.AutofeedSettings", + "shortName": "get_autofeed_settings" + }, + "description": "Sample for GetAutofeedSettings", + "file": "merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AutofeedSettingsService_GetAutofeedSettings_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AutofeedSettingsServiceAsyncClient", + "shortName": "AutofeedSettingsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AutofeedSettingsServiceAsyncClient.update_autofeed_settings", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AutofeedSettingsService.UpdateAutofeedSettings", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AutofeedSettingsService", + "shortName": "AutofeedSettingsService" + }, + "shortName": "UpdateAutofeedSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateAutofeedSettingsRequest" + }, + { + "name": "autofeed_settings", + "type": "google.shopping.merchant_accounts_v1beta.types.AutofeedSettings" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.AutofeedSettings", + "shortName": "update_autofeed_settings" + }, + "description": "Sample for UpdateAutofeedSettings", + "file": "merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AutofeedSettingsService_UpdateAutofeedSettings_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AutofeedSettingsServiceClient", + "shortName": "AutofeedSettingsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AutofeedSettingsServiceClient.update_autofeed_settings", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AutofeedSettingsService.UpdateAutofeedSettings", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AutofeedSettingsService", + "shortName": "AutofeedSettingsService" + }, + "shortName": "UpdateAutofeedSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateAutofeedSettingsRequest" + }, + { + "name": "autofeed_settings", + "type": "google.shopping.merchant_accounts_v1beta.types.AutofeedSettings" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.AutofeedSettings", + "shortName": "update_autofeed_settings" + }, + "description": "Sample for UpdateAutofeedSettings", + "file": "merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AutofeedSettingsService_UpdateAutofeedSettings_sync", + 
"segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_sync.py" + }, { "canonical": true, "clientMethod": { @@ -6027,12 +6357,12 @@ "regionTag": "merchantapi_v1beta_generated_TermsOfServiceService_RetrieveLatestTermsOfService_async", "segments": [ { - "end": 50, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 52, "start": 27, "type": "SHORT" }, @@ -6042,18 +6372,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -6103,12 +6433,12 @@ "regionTag": "merchantapi_v1beta_generated_TermsOfServiceService_RetrieveLatestTermsOfService_sync", "segments": [ { - "end": 50, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 52, "start": 27, "type": "SHORT" }, @@ -6118,18 +6448,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/google-shopping-merchant-accounts/scripts/fixup_merchant_accounts_v1beta_keywords.py b/packages/google-shopping-merchant-accounts/scripts/fixup_merchant_accounts_v1beta_keywords.py index fb344faa3a22..a7c0d0a5f668 100644 --- 
a/packages/google-shopping-merchant-accounts/scripts/fixup_merchant_accounts_v1beta_keywords.py +++ b/packages/google-shopping-merchant-accounts/scripts/fixup_merchant_accounts_v1beta_keywords.py @@ -41,16 +41,17 @@ class merchant_accountsCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'accept_terms_of_service': ('name', 'account', 'region_code', ), 'claim_homepage': ('name', ), - 'create_and_configure_account': ('account', 'users', 'accept_terms_of_service', 'service', ), + 'create_and_configure_account': ('account', 'service', 'users', 'accept_terms_of_service', ), 'create_region': ('parent', 'region_id', 'region', ), 'create_user': ('parent', 'user_id', 'user', ), - 'delete_account': ('name', ), + 'delete_account': ('name', 'force', ), 'delete_region': ('name', ), 'delete_user': ('name', ), 'disable_program': ('name', ), 'enable_program': ('name', ), 'get_account': ('name', ), 'get_account_tax': ('name', ), + 'get_autofeed_settings': ('name', ), 'get_business_identity': ('name', ), 'get_business_info': ('name', ), 'get_email_preferences': ('name', ), @@ -76,6 +77,7 @@ class merchant_accountsCallTransformer(cst.CSTTransformer): 'unclaim_homepage': ('name', ), 'update_account': ('account', 'update_mask', ), 'update_account_tax': ('account_tax', 'update_mask', ), + 'update_autofeed_settings': ('autofeed_settings', 'update_mask', ), 'update_business_identity': ('business_identity', 'update_mask', ), 'update_business_info': ('business_info', 'update_mask', ), 'update_email_preferences': ('email_preferences', 'update_mask', ), diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_issue_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_issue_service.py index 9cfff1670b0a..800b1a6017e0 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_issue_service.py +++ 
b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_issue_service.py @@ -35,7 +35,6 @@ from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from google.protobuf import json_format -from google.type import datetime_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -1260,6 +1259,7 @@ def test_list_account_issues_non_empty_request_with_auto_populated_field(): parent="parent_value", page_token="page_token_value", language_code="language_code_value", + time_zone="time_zone_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1276,6 +1276,7 @@ def test_list_account_issues_non_empty_request_with_auto_populated_field(): parent="parent_value", page_token="page_token_value", language_code="language_code_value", + time_zone="time_zone_value", ) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_accounts_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_accounts_service.py index c5e99b1fa6eb..c688a4d567ce 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_accounts_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_accounts_service.py @@ -34,7 +34,6 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account -from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.type import datetime_pb2 # type: ignore @@ -52,7 +51,12 @@ pagers, transports, ) -from google.shopping.merchant_accounts_v1beta.types import accessright, accounts, user +from google.shopping.merchant_accounts_v1beta.types import ( + 
accessright, + accounts, + accountservices, + user, +) def client_cert_source_callback(): @@ -4006,7 +4010,15 @@ def test_create_and_configure_account_rest_unset_required_fields(): ) unset_fields = transport.create_and_configure_account._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("account",))) + assert set(unset_fields) == ( + set(()) + & set( + ( + "account", + "service", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -4192,6 +4204,8 @@ def test_delete_account_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).delete_account._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -4242,7 +4256,7 @@ def test_delete_account_rest_unset_required_fields(): ) unset_fields = transport.delete_account._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("force",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_autofeed_settings_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_autofeed_settings_service.py new file mode 100644 index 000000000000..0c01e92c6a8c --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_autofeed_settings_service.py @@ -0,0 +1,3474 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_accounts_v1beta.services.autofeed_settings_service import ( + AutofeedSettingsServiceAsyncClient, + AutofeedSettingsServiceClient, + transports, +) +from google.shopping.merchant_accounts_v1beta.types import autofeedsettings + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AutofeedSettingsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + AutofeedSettingsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + AutofeedSettingsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + AutofeedSettingsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AutofeedSettingsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AutofeedSettingsServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert AutofeedSettingsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert AutofeedSettingsServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert AutofeedSettingsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + AutofeedSettingsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert AutofeedSettingsServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert AutofeedSettingsServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert AutofeedSettingsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + AutofeedSettingsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert AutofeedSettingsServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert AutofeedSettingsServiceClient._get_client_cert_source(None, False) is None + assert ( + AutofeedSettingsServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + 
AutofeedSettingsServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + AutofeedSettingsServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + AutofeedSettingsServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + AutofeedSettingsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AutofeedSettingsServiceClient), +) +@mock.patch.object( + AutofeedSettingsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AutofeedSettingsServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = AutofeedSettingsServiceClient._DEFAULT_UNIVERSE + default_endpoint = AutofeedSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = AutofeedSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + AutofeedSettingsServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + AutofeedSettingsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == AutofeedSettingsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AutofeedSettingsServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + AutofeedSettingsServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == 
AutofeedSettingsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AutofeedSettingsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == AutofeedSettingsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AutofeedSettingsServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + AutofeedSettingsServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + AutofeedSettingsServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + AutofeedSettingsServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + AutofeedSettingsServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + AutofeedSettingsServiceClient._get_universe_domain(None, None) + == AutofeedSettingsServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + AutofeedSettingsServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceGrpcTransport, + "grpc", + ), + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. 
+ google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AutofeedSettingsServiceClient, "grpc"), + (AutofeedSettingsServiceAsyncClient, "grpc_asyncio"), + (AutofeedSettingsServiceClient, "rest"), + ], +) +def test_autofeed_settings_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.AutofeedSettingsServiceGrpcTransport, "grpc"), + (transports.AutofeedSettingsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AutofeedSettingsServiceRestTransport, "rest"), + ], +) +def test_autofeed_settings_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + 
use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AutofeedSettingsServiceClient, "grpc"), + (AutofeedSettingsServiceAsyncClient, "grpc_asyncio"), + (AutofeedSettingsServiceClient, "rest"), + ], +) +def test_autofeed_settings_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_autofeed_settings_service_client_get_transport_class(): + transport = AutofeedSettingsServiceClient.get_transport_class() + available_transports = [ + transports.AutofeedSettingsServiceGrpcTransport, + transports.AutofeedSettingsServiceRestTransport, + ] + assert transport in available_transports + + transport = AutofeedSettingsServiceClient.get_transport_class("grpc") + assert transport == transports.AutofeedSettingsServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceGrpcTransport, + "grpc", + ), + ( + AutofeedSettingsServiceAsyncClient, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + 
AutofeedSettingsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AutofeedSettingsServiceClient), +) +@mock.patch.object( + AutofeedSettingsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AutofeedSettingsServiceAsyncClient), +) +def test_autofeed_settings_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(AutofeedSettingsServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AutofeedSettingsServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceGrpcTransport, + "grpc", + "true", + ), + ( + AutofeedSettingsServiceAsyncClient, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + 
"grpc_asyncio", + "true", + ), + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceGrpcTransport, + "grpc", + "false", + ), + ( + AutofeedSettingsServiceAsyncClient, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceRestTransport, + "rest", + "true", + ), + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + AutofeedSettingsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AutofeedSettingsServiceClient), +) +@mock.patch.object( + AutofeedSettingsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AutofeedSettingsServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_autofeed_settings_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [AutofeedSettingsServiceClient, AutofeedSettingsServiceAsyncClient] +) +@mock.patch.object( + AutofeedSettingsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AutofeedSettingsServiceClient), +) +@mock.patch.object( + AutofeedSettingsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AutofeedSettingsServiceAsyncClient), +) +def test_autofeed_settings_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [AutofeedSettingsServiceClient, AutofeedSettingsServiceAsyncClient] +) +@mock.patch.object( + AutofeedSettingsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AutofeedSettingsServiceClient), +) +@mock.patch.object( + AutofeedSettingsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AutofeedSettingsServiceAsyncClient), +) +def test_autofeed_settings_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = AutofeedSettingsServiceClient._DEFAULT_UNIVERSE + default_endpoint = AutofeedSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = AutofeedSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceGrpcTransport, + "grpc", + ), + ( + AutofeedSettingsServiceAsyncClient, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceRestTransport, + "rest", + ), + ], +) +def test_autofeed_settings_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AutofeedSettingsServiceAsyncClient, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceRestTransport, + "rest", + None, + ), + ], +) +def test_autofeed_settings_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_autofeed_settings_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.autofeed_settings_service.transports.AutofeedSettingsServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = AutofeedSettingsServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AutofeedSettingsServiceAsyncClient, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_autofeed_settings_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + autofeedsettings.GetAutofeedSettingsRequest, + dict, + ], +) +def test_get_autofeed_settings(request_type, transport: str = "grpc"): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # 
and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = autofeedsettings.AutofeedSettings( + name="name_value", + enable_products=True, + eligible=True, + ) + response = client.get_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = autofeedsettings.GetAutofeedSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, autofeedsettings.AutofeedSettings) + assert response.name == "name_value" + assert response.enable_products is True + assert response.eligible is True + + +def test_get_autofeed_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autofeed_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_autofeed_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == autofeedsettings.GetAutofeedSettingsRequest() + + +def test_get_autofeed_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = autofeedsettings.GetAutofeedSettingsRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autofeed_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_autofeed_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == autofeedsettings.GetAutofeedSettingsRequest( + name="name_value", + ) + + +def test_get_autofeed_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_autofeed_settings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_autofeed_settings + ] = mock_rpc + request = {} + client.get_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_autofeed_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_autofeed_settings_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autofeedsettings.AutofeedSettings( + name="name_value", + enable_products=True, + eligible=True, + ) + ) + response = await client.get_autofeed_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == autofeedsettings.GetAutofeedSettingsRequest() + + +@pytest.mark.asyncio +async def test_get_autofeed_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_autofeed_settings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + 
client._client._transport._wrapped_methods[ + client._client._transport.get_autofeed_settings + ] = mock_rpc + + request = {} + await client.get_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_autofeed_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_autofeed_settings_async( + transport: str = "grpc_asyncio", + request_type=autofeedsettings.GetAutofeedSettingsRequest, +): + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autofeedsettings.AutofeedSettings( + name="name_value", + enable_products=True, + eligible=True, + ) + ) + response = await client.get_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = autofeedsettings.GetAutofeedSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, autofeedsettings.AutofeedSettings) + assert response.name == "name_value" + assert response.enable_products is True + assert response.eligible is True + + +@pytest.mark.asyncio +async def test_get_autofeed_settings_async_from_dict(): + await test_get_autofeed_settings_async(request_type=dict) + + +def test_get_autofeed_settings_field_headers(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = autofeedsettings.GetAutofeedSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autofeed_settings), "__call__" + ) as call: + call.return_value = autofeedsettings.AutofeedSettings() + client.get_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_autofeed_settings_field_headers_async(): + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = autofeedsettings.GetAutofeedSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_autofeed_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autofeedsettings.AutofeedSettings() + ) + await client.get_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_autofeed_settings_flattened(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = autofeedsettings.AutofeedSettings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_autofeed_settings( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_autofeed_settings_flattened_error(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_autofeed_settings( + autofeedsettings.GetAutofeedSettingsRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_autofeed_settings_flattened_async(): + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = autofeedsettings.AutofeedSettings() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autofeedsettings.AutofeedSettings() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_autofeed_settings( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_autofeed_settings_flattened_error_async(): + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_autofeed_settings( + autofeedsettings.GetAutofeedSettingsRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + autofeedsettings.UpdateAutofeedSettingsRequest, + dict, + ], +) +def test_update_autofeed_settings(request_type, transport: str = "grpc"): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = autofeedsettings.AutofeedSettings( + name="name_value", + enable_products=True, + eligible=True, + ) + response = client.update_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = autofeedsettings.UpdateAutofeedSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, autofeedsettings.AutofeedSettings) + assert response.name == "name_value" + assert response.enable_products is True + assert response.eligible is True + + +def test_update_autofeed_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_autofeed_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_autofeed_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == autofeedsettings.UpdateAutofeedSettingsRequest() + + +def test_update_autofeed_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = autofeedsettings.UpdateAutofeedSettingsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autofeed_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_autofeed_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == autofeedsettings.UpdateAutofeedSettingsRequest() + + +def test_update_autofeed_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_autofeed_settings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_autofeed_settings + ] = mock_rpc + request = {} + client.update_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_autofeed_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_autofeed_settings_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autofeedsettings.AutofeedSettings( + name="name_value", + enable_products=True, + eligible=True, + ) + ) + response = await client.update_autofeed_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == autofeedsettings.UpdateAutofeedSettingsRequest() + + +@pytest.mark.asyncio +async def test_update_autofeed_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_autofeed_settings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_autofeed_settings + ] = mock_rpc + + request = {} + await client.update_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.update_autofeed_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_autofeed_settings_async( + transport: str = "grpc_asyncio", + request_type=autofeedsettings.UpdateAutofeedSettingsRequest, +): + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autofeedsettings.AutofeedSettings( + name="name_value", + enable_products=True, + eligible=True, + ) + ) + response = await client.update_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = autofeedsettings.UpdateAutofeedSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, autofeedsettings.AutofeedSettings) + assert response.name == "name_value" + assert response.enable_products is True + assert response.eligible is True + + +@pytest.mark.asyncio +async def test_update_autofeed_settings_async_from_dict(): + await test_update_autofeed_settings_async(request_type=dict) + + +def test_update_autofeed_settings_field_headers(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = autofeedsettings.UpdateAutofeedSettingsRequest() + + request.autofeed_settings.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autofeed_settings), "__call__" + ) as call: + call.return_value = autofeedsettings.AutofeedSettings() + client.update_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "autofeed_settings.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_autofeed_settings_field_headers_async(): + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = autofeedsettings.UpdateAutofeedSettingsRequest() + + request.autofeed_settings.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_autofeed_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autofeedsettings.AutofeedSettings() + ) + await client.update_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "autofeed_settings.name=name_value", + ) in kw["metadata"] + + +def test_update_autofeed_settings_flattened(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = autofeedsettings.AutofeedSettings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_autofeed_settings( + autofeed_settings=autofeedsettings.AutofeedSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].autofeed_settings + mock_val = autofeedsettings.AutofeedSettings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_autofeed_settings_flattened_error(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_autofeed_settings( + autofeedsettings.UpdateAutofeedSettingsRequest(), + autofeed_settings=autofeedsettings.AutofeedSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_autofeed_settings_flattened_async(): + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = autofeedsettings.AutofeedSettings() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autofeedsettings.AutofeedSettings() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_autofeed_settings( + autofeed_settings=autofeedsettings.AutofeedSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].autofeed_settings + mock_val = autofeedsettings.AutofeedSettings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_autofeed_settings_flattened_error_async(): + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_autofeed_settings( + autofeedsettings.UpdateAutofeedSettingsRequest(), + autofeed_settings=autofeedsettings.AutofeedSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + autofeedsettings.GetAutofeedSettingsRequest, + dict, + ], +) +def test_get_autofeed_settings_rest(request_type): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/autofeedSettings"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = autofeedsettings.AutofeedSettings( + name="name_value", + enable_products=True, + eligible=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = autofeedsettings.AutofeedSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_autofeed_settings(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, autofeedsettings.AutofeedSettings) + assert response.name == "name_value" + assert response.enable_products is True + assert response.eligible is True + + +def test_get_autofeed_settings_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_autofeed_settings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_autofeed_settings + ] = mock_rpc + + request = {} + client.get_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_autofeed_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_autofeed_settings_rest_required_fields( + request_type=autofeedsettings.GetAutofeedSettingsRequest, +): + transport_class = transports.AutofeedSettingsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_autofeed_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_autofeed_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = autofeedsettings.AutofeedSettings() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = autofeedsettings.AutofeedSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_autofeed_settings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_autofeed_settings_rest_unset_required_fields(): + transport = transports.AutofeedSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_autofeed_settings._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_autofeed_settings_rest_interceptors(null_interceptor): + transport = transports.AutofeedSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AutofeedSettingsServiceRestInterceptor(), + ) + client = AutofeedSettingsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AutofeedSettingsServiceRestInterceptor, "post_get_autofeed_settings" + ) as post, mock.patch.object( + transports.AutofeedSettingsServiceRestInterceptor, 
"pre_get_autofeed_settings" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = autofeedsettings.GetAutofeedSettingsRequest.pb( + autofeedsettings.GetAutofeedSettingsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = autofeedsettings.AutofeedSettings.to_json( + autofeedsettings.AutofeedSettings() + ) + + request = autofeedsettings.GetAutofeedSettingsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = autofeedsettings.AutofeedSettings() + + client.get_autofeed_settings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_autofeed_settings_rest_bad_request( + transport: str = "rest", request_type=autofeedsettings.GetAutofeedSettingsRequest +): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/autofeedSettings"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_autofeed_settings(request) + + +def test_get_autofeed_settings_rest_flattened(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = autofeedsettings.AutofeedSettings() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/autofeedSettings"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = autofeedsettings.AutofeedSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_autofeed_settings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=accounts/*/autofeedSettings}" + % client.transport._host, + args[1], + ) + + +def test_get_autofeed_settings_rest_flattened_error(transport: str = "rest"): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_autofeed_settings( + autofeedsettings.GetAutofeedSettingsRequest(), + name="name_value", + ) + + +def test_get_autofeed_settings_rest_error(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + autofeedsettings.UpdateAutofeedSettingsRequest, + dict, + ], +) +def test_update_autofeed_settings_rest(request_type): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"autofeed_settings": {"name": "accounts/sample1/autofeedSettings"}} + request_init["autofeed_settings"] = { + "name": "accounts/sample1/autofeedSettings", + "enable_products": True, + "eligible": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = autofeedsettings.UpdateAutofeedSettingsRequest.meta.fields[ + "autofeed_settings" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["autofeed_settings"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, 
len(request_init["autofeed_settings"][field])): + del request_init["autofeed_settings"][field][i][subfield] + else: + del request_init["autofeed_settings"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = autofeedsettings.AutofeedSettings( + name="name_value", + enable_products=True, + eligible=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = autofeedsettings.AutofeedSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_autofeed_settings(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, autofeedsettings.AutofeedSettings) + assert response.name == "name_value" + assert response.enable_products is True + assert response.eligible is True + + +def test_update_autofeed_settings_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_autofeed_settings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_autofeed_settings + ] = mock_rpc + + request = {} + client.update_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_autofeed_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_autofeed_settings_rest_required_fields( + request_type=autofeedsettings.UpdateAutofeedSettingsRequest, +): + transport_class = transports.AutofeedSettingsServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_autofeed_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_autofeed_settings._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = autofeedsettings.AutofeedSettings() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = autofeedsettings.AutofeedSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_autofeed_settings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_autofeed_settings_rest_unset_required_fields(): + transport = transports.AutofeedSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_autofeed_settings._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "autofeedSettings", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_autofeed_settings_rest_interceptors(null_interceptor): + transport = transports.AutofeedSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AutofeedSettingsServiceRestInterceptor(), + ) + client = AutofeedSettingsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AutofeedSettingsServiceRestInterceptor, + 
"post_update_autofeed_settings", + ) as post, mock.patch.object( + transports.AutofeedSettingsServiceRestInterceptor, + "pre_update_autofeed_settings", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = autofeedsettings.UpdateAutofeedSettingsRequest.pb( + autofeedsettings.UpdateAutofeedSettingsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = autofeedsettings.AutofeedSettings.to_json( + autofeedsettings.AutofeedSettings() + ) + + request = autofeedsettings.UpdateAutofeedSettingsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = autofeedsettings.AutofeedSettings() + + client.update_autofeed_settings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_autofeed_settings_rest_bad_request( + transport: str = "rest", request_type=autofeedsettings.UpdateAutofeedSettingsRequest +): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"autofeed_settings": {"name": "accounts/sample1/autofeedSettings"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_autofeed_settings(request) + + +def test_update_autofeed_settings_rest_flattened(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = autofeedsettings.AutofeedSettings() + + # get arguments that satisfy an http rule for this method + sample_request = { + "autofeed_settings": {"name": "accounts/sample1/autofeedSettings"} + } + + # get truthy value for each flattened field + mock_args = dict( + autofeed_settings=autofeedsettings.AutofeedSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = autofeedsettings.AutofeedSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_autofeed_settings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{autofeed_settings.name=accounts/*/autofeedSettings}" + % client.transport._host, + args[1], + ) + + +def test_update_autofeed_settings_rest_flattened_error(transport: str = "rest"): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_autofeed_settings( + autofeedsettings.UpdateAutofeedSettingsRequest(), + autofeed_settings=autofeedsettings.AutofeedSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_autofeed_settings_rest_error(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.AutofeedSettingsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AutofeedSettingsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AutofeedSettingsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.AutofeedSettingsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AutofeedSettingsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AutofeedSettingsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AutofeedSettingsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AutofeedSettingsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.AutofeedSettingsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AutofeedSettingsServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.AutofeedSettingsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AutofeedSettingsServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutofeedSettingsServiceGrpcTransport, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + transports.AutofeedSettingsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = AutofeedSettingsServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.AutofeedSettingsServiceGrpcTransport, + ) + + +def test_autofeed_settings_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AutofeedSettingsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_autofeed_settings_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.autofeed_settings_service.transports.AutofeedSettingsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.AutofeedSettingsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_autofeed_settings", + "update_autofeed_settings", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_autofeed_settings_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.autofeed_settings_service.transports.AutofeedSettingsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AutofeedSettingsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_autofeed_settings_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.autofeed_settings_service.transports.AutofeedSettingsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AutofeedSettingsServiceTransport() + adc.assert_called_once() + + +def test_autofeed_settings_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AutofeedSettingsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutofeedSettingsServiceGrpcTransport, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + ], +) +def test_autofeed_settings_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutofeedSettingsServiceGrpcTransport, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + transports.AutofeedSettingsServiceRestTransport, + ], +) +def test_autofeed_settings_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AutofeedSettingsServiceGrpcTransport, grpc_helpers), + (transports.AutofeedSettingsServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_autofeed_settings_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutofeedSettingsServiceGrpcTransport, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + ], +) +def test_autofeed_settings_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_autofeed_settings_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.AutofeedSettingsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_autofeed_settings_service_host_no_port(transport_name): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_autofeed_settings_service_host_with_port(transport_name): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else 
"https://merchantapi.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_autofeed_settings_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = AutofeedSettingsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AutofeedSettingsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_autofeed_settings._session + session2 = client2.transport.get_autofeed_settings._session + assert session1 != session2 + session1 = client1.transport.update_autofeed_settings._session + session2 = client2.transport.update_autofeed_settings._session + assert session1 != session2 + + +def test_autofeed_settings_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AutofeedSettingsServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_autofeed_settings_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AutofeedSettingsServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.AutofeedSettingsServiceGrpcTransport, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + ], +) +def test_autofeed_settings_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.AutofeedSettingsServiceGrpcTransport, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + ], +) +def test_autofeed_settings_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_autofeed_settings_path(): + account = "squid" + expected = "accounts/{account}/autofeedSettings".format( + account=account, + ) + actual = AutofeedSettingsServiceClient.autofeed_settings_path(account) + assert expected == actual + + +def test_parse_autofeed_settings_path(): + expected = { + "account": "clam", + } + path = AutofeedSettingsServiceClient.autofeed_settings_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AutofeedSettingsServiceClient.parse_autofeed_settings_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = AutofeedSettingsServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = AutofeedSettingsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = AutofeedSettingsServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = AutofeedSettingsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = AutofeedSettingsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = AutofeedSettingsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = AutofeedSettingsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = AutofeedSettingsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AutofeedSettingsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = AutofeedSettingsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = AutofeedSettingsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = AutofeedSettingsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = AutofeedSettingsServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = AutofeedSettingsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AutofeedSettingsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.AutofeedSettingsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.AutofeedSettingsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = AutofeedSettingsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceGrpcTransport, + ), + ( + AutofeedSettingsServiceAsyncClient, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_info_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_info_service.py index 15afcf6bc479..e2e4eb7c38fb 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_info_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_info_service.py @@ -1216,6 +1216,7 @@ def test_get_business_info(request_type, transport: str = "grpc"): call.return_value = businessinfo.BusinessInfo( name="name_value", 
phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + korean_business_registration_number="korean_business_registration_number_value", ) response = client.get_business_info(request) @@ -1232,6 +1233,10 @@ def test_get_business_info(request_type, transport: str = "grpc"): response.phone_verification_state == phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED ) + assert ( + response.korean_business_registration_number + == "korean_business_registration_number_value" + ) def test_get_business_info_empty_call(): @@ -1340,6 +1345,7 @@ async def test_get_business_info_empty_call_async(): businessinfo.BusinessInfo( name="name_value", phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + korean_business_registration_number="korean_business_registration_number_value", ) ) response = await client.get_business_info() @@ -1412,6 +1418,7 @@ async def test_get_business_info_async( businessinfo.BusinessInfo( name="name_value", phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + korean_business_registration_number="korean_business_registration_number_value", ) ) response = await client.get_business_info(request) @@ -1429,6 +1436,10 @@ async def test_get_business_info_async( response.phone_verification_state == phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED ) + assert ( + response.korean_business_registration_number + == "korean_business_registration_number_value" + ) @pytest.mark.asyncio @@ -1612,6 +1623,7 @@ def test_update_business_info(request_type, transport: str = "grpc"): call.return_value = businessinfo.BusinessInfo( name="name_value", phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + korean_business_registration_number="korean_business_registration_number_value", ) response = 
client.update_business_info(request) @@ -1628,6 +1640,10 @@ def test_update_business_info(request_type, transport: str = "grpc"): response.phone_verification_state == phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED ) + assert ( + response.korean_business_registration_number + == "korean_business_registration_number_value" + ) def test_update_business_info_empty_call(): @@ -1734,6 +1750,7 @@ async def test_update_business_info_empty_call_async(): businessinfo.BusinessInfo( name="name_value", phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + korean_business_registration_number="korean_business_registration_number_value", ) ) response = await client.update_business_info() @@ -1806,6 +1823,7 @@ async def test_update_business_info_async( businessinfo.BusinessInfo( name="name_value", phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + korean_business_registration_number="korean_business_registration_number_value", ) ) response = await client.update_business_info(request) @@ -1823,6 +1841,10 @@ async def test_update_business_info_async( response.phone_verification_state == phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED ) + assert ( + response.korean_business_registration_number + == "korean_business_registration_number_value" + ) @pytest.mark.asyncio @@ -2014,6 +2036,7 @@ def test_get_business_info_rest(request_type): return_value = businessinfo.BusinessInfo( name="name_value", phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + korean_business_registration_number="korean_business_registration_number_value", ) # Wrap the value into a proper Response obj @@ -2034,6 +2057,10 @@ def test_get_business_info_rest(request_type): response.phone_verification_state == phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED ) + 
assert ( + response.korean_business_registration_number + == "korean_business_registration_number_value" + ) def test_get_business_info_rest_use_cached_wrapped_rpc(): @@ -2341,6 +2368,7 @@ def test_update_business_info_rest(request_type): }, "phone_verification_state": 1, "customer_service": {"uri": "uri_value", "email": "email_value", "phone": {}}, + "korean_business_registration_number": "korean_business_registration_number_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -2417,6 +2445,7 @@ def get_message_fields(field): return_value = businessinfo.BusinessInfo( name="name_value", phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + korean_business_registration_number="korean_business_registration_number_value", ) # Wrap the value into a proper Response obj @@ -2437,6 +2466,10 @@ def get_message_fields(field): response.phone_verification_state == phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED ) + assert ( + response.korean_business_registration_number + == "korean_business_registration_number_value" + ) def test_update_business_info_rest_use_cached_wrapped_rpc(): diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_service.py index e50f9373a6c8..4ea60222ebc9 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_service.py @@ -2637,6 +2637,120 @@ def test_retrieve_latest_terms_of_service_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 +def 
test_retrieve_latest_terms_of_service_rest_required_fields( + request_type=termsofservice.RetrieveLatestTermsOfServiceRequest, +): + transport_class = transports.TermsOfServiceServiceRestTransport + + request_init = {} + request_init["region_code"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "regionCode" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retrieve_latest_terms_of_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "regionCode" in jsonified_request + assert jsonified_request["regionCode"] == request_init["region_code"] + + jsonified_request["regionCode"] = "region_code_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retrieve_latest_terms_of_service._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "kind", + "region_code", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "regionCode" in jsonified_request + assert jsonified_request["regionCode"] == "region_code_value" + + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = termsofservice.TermsOfService() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = termsofservice.TermsOfService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.retrieve_latest_terms_of_service(request) + + expected_params = [ + ( + "regionCode", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_retrieve_latest_terms_of_service_rest_unset_required_fields(): + transport = transports.TermsOfServiceServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.retrieve_latest_terms_of_service._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "kind", + "regionCode", + ) + ) + & set( + ( + "regionCode", + "kind", + ) + ) + ) + + @pytest.mark.parametrize("null_interceptor", [True, False]) def test_retrieve_latest_terms_of_service_rest_interceptors(null_interceptor): transport = transports.TermsOfServiceServiceRestTransport( From c38431b363fd4f18bb692593f401e3ac3759637c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 11:58:20 -0400 
Subject: [PATCH 46/59] feat: [google-cloud-channel] Add support for primary_admin_email as customer_identity for ImportCustomer (#13126) BEGIN_COMMIT_OVERRIDE feat: Add support for primary_admin_email as customer_identity for ImportCustomer feat: Add support for importing team customer from a different reseller feat: Add support to look up team customer Cloud Identity information docs: Clarify the expected value of the domain field for team type customers END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. feat: Add support for importing team customer from a different reseller feat: Add support to look up team customer Cloud Identity information docs: Clarify the expected value of the domain field for team type customers PiperOrigin-RevId: 682051698 Source-Link: https://github.com/googleapis/googleapis/commit/b6a27d13a2f0223051ef720e4e9d0d52323560e6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/efd321b69d8e4032c2690c19e8131d7b1702f977 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNoYW5uZWwvLk93bEJvdC55YW1sIiwiaCI6ImVmZDMyMWI2OWQ4ZTQwMzJjMjY5MGMxOWU4MTMxZDdiMTcwMmY5NzcifQ== --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google/cloud/channel_v1/types/service.py | 34 ++++++++++++++++++- .../scripts/fixup_channel_v1_keywords.py | 4 +-- .../channel_v1/test_cloud_channel_service.py | 4 +++ 3 files changed, 39 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py b/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py index c8ae1f8d383f..9738f170a601 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py @@ -127,7 +127,12 @@ class CheckCloudIdentityAccountsExistRequest(proto.Message): the format: accounts/{account_id} domain (str): Required. Domain to fetch for Cloud Identity - account customer. 
+ account customers, including domain and team + customers. For team customers, please use the + domain for their emails. + primary_admin_email (str): + Optional. Primary admin email to fetch for + Cloud Identity account team customer. """ parent: str = proto.Field( @@ -138,6 +143,10 @@ class CheckCloudIdentityAccountsExistRequest(proto.Message): proto.STRING, number=2, ) + primary_admin_email: str = proto.Field( + proto.STRING, + number=4, + ) class CloudIdentityCustomerAccount(proto.Message): @@ -159,6 +168,11 @@ class CloudIdentityCustomerAccount(proto.Message): customer_cloud_identity_id (str): If existing = true, the Cloud Identity ID of the customer. + customer_type (google.cloud.channel_v1.types.CloudIdentityInfo.CustomerType): + If existing = true, the type of the customer. + channel_partner_cloud_identity_id (str): + If existing = true, and is 2-tier customer, + the channel partner of the customer. """ existing: bool = proto.Field( @@ -177,6 +191,15 @@ class CloudIdentityCustomerAccount(proto.Message): proto.STRING, number=4, ) + customer_type: common.CloudIdentityInfo.CustomerType = proto.Field( + proto.ENUM, + number=5, + enum=common.CloudIdentityInfo.CustomerType, + ) + channel_partner_cloud_identity_id: str = proto.Field( + proto.STRING, + number=6, + ) class CheckCloudIdentityAccountsExistResponse(proto.Message): @@ -373,6 +396,10 @@ class ImportCustomerRequest(proto.Message): cloud_identity_id (str): Required. Customer's Cloud Identity ID + This field is a member of `oneof`_ ``customer_identity``. + primary_admin_email (str): + Required. Customer's primary admin email. + This field is a member of `oneof`_ ``customer_identity``. parent (str): Required. The resource name of the reseller's account. 
@@ -413,6 +440,11 @@ class ImportCustomerRequest(proto.Message): number=3, oneof="customer_identity", ) + primary_admin_email: str = proto.Field( + proto.STRING, + number=8, + oneof="customer_identity", + ) parent: str = proto.Field( proto.STRING, number=1, diff --git a/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py b/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py index 7c3e175a35d5..a7022924a590 100644 --- a/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py +++ b/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py @@ -44,7 +44,7 @@ class channelCallTransformer(cst.CSTTransformer): 'change_offer': ('name', 'offer', 'parameters', 'purchase_order_id', 'request_id', 'billing_account', ), 'change_parameters': ('name', 'parameters', 'request_id', 'purchase_order_id', ), 'change_renewal_settings': ('name', 'renewal_settings', 'request_id', ), - 'check_cloud_identity_accounts_exist': ('parent', 'domain', ), + 'check_cloud_identity_accounts_exist': ('parent', 'domain', 'primary_admin_email', ), 'create_channel_partner_link': ('parent', 'channel_partner_link', ), 'create_channel_partner_repricing_config': ('parent', 'channel_partner_repricing_config', ), 'create_customer': ('parent', 'customer', ), @@ -59,7 +59,7 @@ class channelCallTransformer(cst.CSTTransformer): 'get_customer': ('name', ), 'get_customer_repricing_config': ('name', ), 'get_entitlement': ('name', ), - 'import_customer': ('domain', 'cloud_identity_id', 'parent', 'overwrite_if_exists', 'auth_token', 'channel_partner_id', 'customer', ), + 'import_customer': ('domain', 'cloud_identity_id', 'primary_admin_email', 'parent', 'overwrite_if_exists', 'auth_token', 'channel_partner_id', 'customer', ), 'list_channel_partner_links': ('parent', 'page_size', 'page_token', 'view', ), 'list_channel_partner_repricing_configs': ('parent', 'page_size', 'page_token', 'filter', ), 'list_customer_repricing_configs': ('parent', 'page_size', 'page_token', 
'filter', ), diff --git a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py index f454d78314e5..b6ab34645279 100644 --- a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py +++ b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py @@ -2110,6 +2110,7 @@ def test_check_cloud_identity_accounts_exist_non_empty_request_with_auto_populat request = service.CheckCloudIdentityAccountsExistRequest( parent="parent_value", domain="domain_value", + primary_admin_email="primary_admin_email_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2125,6 +2126,7 @@ def test_check_cloud_identity_accounts_exist_non_empty_request_with_auto_populat assert args[0] == service.CheckCloudIdentityAccountsExistRequest( parent="parent_value", domain="domain_value", + primary_admin_email="primary_admin_email_value", ) @@ -3397,6 +3399,7 @@ def test_import_customer_non_empty_request_with_auto_populated_field(): request = service.ImportCustomerRequest( domain="domain_value", cloud_identity_id="cloud_identity_id_value", + primary_admin_email="primary_admin_email_value", parent="parent_value", auth_token="auth_token_value", channel_partner_id="channel_partner_id_value", @@ -3414,6 +3417,7 @@ def test_import_customer_non_empty_request_with_auto_populated_field(): assert args[0] == service.ImportCustomerRequest( domain="domain_value", cloud_identity_id="cloud_identity_id_value", + primary_admin_email="primary_admin_email_value", parent="parent_value", auth_token="auth_token_value", channel_partner_id="channel_partner_id_value", From eeab5c1ee68559605ab3dc251314d45bd92191cd Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 12:24:29 -0400 Subject: [PATCH 47/59] feat: [google-cloud-documentai] Removed 
deprecated api versions (#13125) BEGIN_COMMIT_OVERRIDE feat: Removed deprecated api versions END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. PiperOrigin-RevId: 681956159 Source-Link: https://github.com/googleapis/googleapis/commit/ff4436beeb10715bedeb54da24f87a3b4dcddcea Source-Link: https://github.com/googleapis/googleapis-gen/commit/1b41353956190b52884e5bcc7eceda2b9a92f756 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRvY3VtZW50YWkvLk93bEJvdC55YW1sIiwiaCI6IjFiNDEzNTM5NTYxOTBiNTI4ODRlNWJjYzdlY2VkYTJiOWE5MmY3NTYifQ== --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google/cloud/documentai/gapic_version.py | 2 +- .../cloud/documentai_v1/gapic_version.py | 2 +- .../cloud/documentai_v1beta3/gapic_version.py | 2 +- ...g_service_batch_process_documents_async.py | 60 ---- ...ng_service_batch_process_documents_sync.py | 60 ---- ...standing_service_process_document_async.py | 56 --- ...rstanding_service_process_document_sync.py | 56 --- ...t_metadata_google.cloud.documentai.v1.json | 2 +- ...adata_google.cloud.documentai.v1beta2.json | 329 ------------------ ...adata_google.cloud.documentai.v1beta3.json | 2 +- 10 files changed, 5 insertions(+), 566 deletions(-) delete mode 100644 packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_async.py delete mode 100644 packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_sync.py delete mode 100644 packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_process_document_async.py delete mode 100644 packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_process_document_sync.py delete mode 100644 
packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json diff --git a/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py index c82b1e137507..558c8aab67c5 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.32.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py index c82b1e137507..558c8aab67c5 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.32.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py index c82b1e137507..558c8aab67c5 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.32.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_async.py b/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_async.py deleted file mode 100644 index 460ca2fce376..000000000000 --- a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchProcessDocuments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-documentai - - -# [START documentai_v1beta2_generated_DocumentUnderstandingService_BatchProcessDocuments_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import documentai_v1beta2 - - -async def sample_batch_process_documents(): - # Create a client - client = documentai_v1beta2.DocumentUnderstandingServiceAsyncClient() - - # Initialize request argument(s) - requests = documentai_v1beta2.ProcessDocumentRequest() - requests.input_config.gcs_source.uri = "uri_value" - requests.input_config.mime_type = "mime_type_value" - - request = documentai_v1beta2.BatchProcessDocumentsRequest( - requests=requests, - ) - - # Make the request - operation = client.batch_process_documents(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END documentai_v1beta2_generated_DocumentUnderstandingService_BatchProcessDocuments_async] diff --git a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_sync.py b/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_sync.py deleted file mode 100644 index b381b928b678..000000000000 --- a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_sync.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchProcessDocuments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-documentai - - -# [START documentai_v1beta2_generated_DocumentUnderstandingService_BatchProcessDocuments_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import documentai_v1beta2 - - -def sample_batch_process_documents(): - # Create a client - client = documentai_v1beta2.DocumentUnderstandingServiceClient() - - # Initialize request argument(s) - requests = documentai_v1beta2.ProcessDocumentRequest() - requests.input_config.gcs_source.uri = "uri_value" - requests.input_config.mime_type = "mime_type_value" - - request = documentai_v1beta2.BatchProcessDocumentsRequest( - requests=requests, - ) - - # Make the request - operation = client.batch_process_documents(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END documentai_v1beta2_generated_DocumentUnderstandingService_BatchProcessDocuments_sync] diff --git a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_process_document_async.py b/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_process_document_async.py deleted file mode 100644 index 54eef08269df..000000000000 --- a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_process_document_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ProcessDocument -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-documentai - - -# [START documentai_v1beta2_generated_DocumentUnderstandingService_ProcessDocument_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import documentai_v1beta2 - - -async def sample_process_document(): - # Create a client - client = documentai_v1beta2.DocumentUnderstandingServiceAsyncClient() - - # Initialize request argument(s) - input_config = documentai_v1beta2.InputConfig() - input_config.gcs_source.uri = "uri_value" - input_config.mime_type = "mime_type_value" - - request = documentai_v1beta2.ProcessDocumentRequest( - input_config=input_config, - ) - - # Make the request - response = await client.process_document(request=request) - - # Handle the response - print(response) - -# [END documentai_v1beta2_generated_DocumentUnderstandingService_ProcessDocument_async] diff --git a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_process_document_sync.py b/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_process_document_sync.py deleted file mode 100644 index f81a39312de1..000000000000 --- 
a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_process_document_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ProcessDocument -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-documentai - - -# [START documentai_v1beta2_generated_DocumentUnderstandingService_ProcessDocument_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import documentai_v1beta2 - - -def sample_process_document(): - # Create a client - client = documentai_v1beta2.DocumentUnderstandingServiceClient() - - # Initialize request argument(s) - input_config = documentai_v1beta2.InputConfig() - input_config.gcs_source.uri = "uri_value" - input_config.mime_type = "mime_type_value" - - request = documentai_v1beta2.ProcessDocumentRequest( - input_config=input_config, - ) - - # Make the request - response = client.process_document(request=request) - - # Handle the response - print(response) - -# [END documentai_v1beta2_generated_DocumentUnderstandingService_ProcessDocument_sync] diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json index 2fc98b45f209..96d60af285a2 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "2.32.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json deleted file mode 100644 index 31e4348ff0cb..000000000000 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json +++ /dev/null @@ -1,329 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.documentai.v1beta2", - 
"version": "v1beta2" - } - ], - "language": "PYTHON", - "name": "google-cloud-documentai", - "version": "2.32.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.documentai_v1beta2.DocumentUnderstandingServiceAsyncClient", - "shortName": "DocumentUnderstandingServiceAsyncClient" - }, - "fullName": "google.cloud.documentai_v1beta2.DocumentUnderstandingServiceAsyncClient.batch_process_documents", - "method": { - "fullName": "google.cloud.documentai.v1beta2.DocumentUnderstandingService.BatchProcessDocuments", - "service": { - "fullName": "google.cloud.documentai.v1beta2.DocumentUnderstandingService", - "shortName": "DocumentUnderstandingService" - }, - "shortName": "BatchProcessDocuments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.documentai_v1beta2.types.BatchProcessDocumentsRequest" - }, - { - "name": "requests", - "type": "MutableSequence[google.cloud.documentai_v1beta2.types.ProcessDocumentRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "batch_process_documents" - }, - "description": "Sample for BatchProcessDocuments", - "file": "documentai_v1beta2_generated_document_understanding_service_batch_process_documents_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "documentai_v1beta2_generated_DocumentUnderstandingService_BatchProcessDocuments_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - 
"start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "documentai_v1beta2_generated_document_understanding_service_batch_process_documents_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.documentai_v1beta2.DocumentUnderstandingServiceClient", - "shortName": "DocumentUnderstandingServiceClient" - }, - "fullName": "google.cloud.documentai_v1beta2.DocumentUnderstandingServiceClient.batch_process_documents", - "method": { - "fullName": "google.cloud.documentai.v1beta2.DocumentUnderstandingService.BatchProcessDocuments", - "service": { - "fullName": "google.cloud.documentai.v1beta2.DocumentUnderstandingService", - "shortName": "DocumentUnderstandingService" - }, - "shortName": "BatchProcessDocuments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.documentai_v1beta2.types.BatchProcessDocumentsRequest" - }, - { - "name": "requests", - "type": "MutableSequence[google.cloud.documentai_v1beta2.types.ProcessDocumentRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "batch_process_documents" - }, - "description": "Sample for BatchProcessDocuments", - "file": "documentai_v1beta2_generated_document_understanding_service_batch_process_documents_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "documentai_v1beta2_generated_DocumentUnderstandingService_BatchProcessDocuments_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - 
"type": "RESPONSE_HANDLING" - } - ], - "title": "documentai_v1beta2_generated_document_understanding_service_batch_process_documents_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.documentai_v1beta2.DocumentUnderstandingServiceAsyncClient", - "shortName": "DocumentUnderstandingServiceAsyncClient" - }, - "fullName": "google.cloud.documentai_v1beta2.DocumentUnderstandingServiceAsyncClient.process_document", - "method": { - "fullName": "google.cloud.documentai.v1beta2.DocumentUnderstandingService.ProcessDocument", - "service": { - "fullName": "google.cloud.documentai.v1beta2.DocumentUnderstandingService", - "shortName": "DocumentUnderstandingService" - }, - "shortName": "ProcessDocument" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.documentai_v1beta2.types.ProcessDocumentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.documentai_v1beta2.types.Document", - "shortName": "process_document" - }, - "description": "Sample for ProcessDocument", - "file": "documentai_v1beta2_generated_document_understanding_service_process_document_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "documentai_v1beta2_generated_DocumentUnderstandingService_ProcessDocument_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "documentai_v1beta2_generated_document_understanding_service_process_document_async.py" - }, - { - 
"canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.documentai_v1beta2.DocumentUnderstandingServiceClient", - "shortName": "DocumentUnderstandingServiceClient" - }, - "fullName": "google.cloud.documentai_v1beta2.DocumentUnderstandingServiceClient.process_document", - "method": { - "fullName": "google.cloud.documentai.v1beta2.DocumentUnderstandingService.ProcessDocument", - "service": { - "fullName": "google.cloud.documentai.v1beta2.DocumentUnderstandingService", - "shortName": "DocumentUnderstandingService" - }, - "shortName": "ProcessDocument" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.documentai_v1beta2.types.ProcessDocumentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.documentai_v1beta2.types.Document", - "shortName": "process_document" - }, - "description": "Sample for ProcessDocument", - "file": "documentai_v1beta2_generated_document_understanding_service_process_document_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "documentai_v1beta2_generated_DocumentUnderstandingService_ProcessDocument_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "documentai_v1beta2_generated_document_understanding_service_process_document_sync.py" - } - ] -} diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json 
b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json index 43bcd3c8902f..f47545a8ed3e 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "2.32.0" + "version": "0.1.0" }, "snippets": [ { From 3881914b43b47bf2ee187f62447ef9eccc851749 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 13:33:30 -0400 Subject: [PATCH 48/59] feat: [google-apps-chat] Add doc for permission settings & announcement space support (#13120) BEGIN_COMMIT_OVERRIDE feat: Removed deprecated api versions END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. feat: Add doc for import mode external users support docs: Messages API dev docs improvement docs: Memberships API dev docs improvement docs: Discoverable space docs improvement PiperOrigin-RevId: 681521060 Source-Link: https://github.com/googleapis/googleapis/commit/c472cf7c64e401e8f55353fddab1b5cd81efb607 Source-Link: https://github.com/googleapis/googleapis-gen/commit/a905bb22c968ebdded136b282ef073992fc140c5 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFwcHMtY2hhdC8uT3dsQm90LnlhbWwiLCJoIjoiYTkwNWJiMjJjOTY4ZWJkZGVkMTM2YjI4MmVmMDczOTkyZmMxNDBjNSJ9 --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr Co-authored-by: ohmayr --- .../google/apps/chat/gapic_version.py | 2 +- .../google/apps/chat_v1/gapic_version.py | 2 +- .../services/chat_service/async_client.py | 271 +++++++------ .../chat_v1/services/chat_service/client.py | 273 +++++++------ .../services/chat_service/transports/grpc.py | 114 +++--- .../chat_service/transports/grpc_asyncio.py | 114 +++--- .../services/chat_service/transports/rest.py | 3 +- 
.../google/apps/chat_v1/types/message.py | 29 +- .../google/apps/chat_v1/types/space.py | 372 +++++++++++++----- .../google/apps/chat_v1/types/space_event.py | 5 +- ...nerated_chat_service_create_space_async.py | 4 + ...enerated_chat_service_create_space_sync.py | 4 + ...nerated_chat_service_set_up_space_async.py | 4 + ...enerated_chat_service_set_up_space_sync.py | 4 + ...nerated_chat_service_update_space_async.py | 4 + ...enerated_chat_service_update_space_sync.py | 4 + .../snippet_metadata_google.chat.v1.json | 86 ++-- .../unit/gapic/chat_v1/test_chat_service.py | 66 ++++ 18 files changed, 862 insertions(+), 499 deletions(-) diff --git a/packages/google-apps-chat/google/apps/chat/gapic_version.py b/packages/google-apps-chat/google/apps/chat/gapic_version.py index 4b834789ba9e..558c8aab67c5 100644 --- a/packages/google-apps-chat/google/apps/chat/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py index 4b834789ba9e..558c8aab67c5 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py index 82774eb03431..8e88d84deba6 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py @@ -314,19 +314,36 @@ async def create_message( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_message.Message: - r"""Creates a message in a Google Chat space. The maximum message - size, including text and cards, is 32,000 bytes. For an example, - see `Send a + r"""Creates a message in a Google Chat space. For an example, see + `Send a message `__. - Calling this method requires - `authentication `__ - and supports the following authentication types: + The ``create()`` method requires either user or app + authentication. Chat attributes the message sender differently + depending on the type of authentication that you use in your + request. - - For text messages, user authentication or app authentication - are supported. - - For card messages, only app authentication is supported. - (Only Chat apps can create card messages.) + The following image shows how Chat attributes a message when you + use app authentication. Chat displays the Chat app as the + message sender. The content of the message can contain text + (``text``), cards (``cardsV2``), and accessory widgets + (``accessoryWidgets``). + + |Message sent with app authentication async client| + + The following image shows how Chat attributes a message when you + use user authentication. Chat displays the user as the message + sender and attributes the Chat app to the message by displaying + its name. 
The content of message can only contain text + (``text``). + + |Message sent with user authentication async client| + + The maximum message size, including the message contents, is + 32,000 bytes. + + .. |Message sent with app authentication async client| image:: https://developers.google.com/workspace/chat/images/message-app-auth.svg + .. |Message sent with user authentication async client| image:: https://developers.google.com/workspace/chat/images/message-user-auth.svg .. code-block:: python @@ -464,9 +481,12 @@ async def list_messages( metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMessagesAsyncPager: r"""Lists messages in a space that the caller is a member of, - including messages from blocked members and spaces. For an - example, see `List - messages `__. Requires `user + including messages from blocked members and spaces. If you list + messages from a space with no messages, the response is an empty + object. When using a REST/HTTP interface, the response contains + an empty JSON object, ``{}``. For an example, see `List + messages `__. + Requires `user authentication `__. .. code-block:: python @@ -1759,8 +1779,9 @@ async def create_space( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_space.Space: - r"""Creates a named space. Spaces grouped by topics aren't - supported. For an example, see `Create a + r"""Creates a space with no members. Can be used to create a named + space. Spaces grouped by topics aren't supported. For an + example, see `Create a space `__. If you receive the error message ``ALREADY_EXISTS`` when @@ -1768,6 +1789,11 @@ async def create_space( space within the Google Workspace organization might already use this display name. + If you're a member of the `Developer Preview + program `__, + you can create a group chat in import mode using + ``spaceType.GROUP_CHAT``. + Requires `user authentication `__. 
@@ -1787,7 +1813,11 @@ async def sample_create_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.CreateSpaceRequest( + space=space, ) # Make the request @@ -1798,16 +1828,22 @@ async def sample_create_space(): Args: request (Optional[Union[google.apps.chat_v1.types.CreateSpaceRequest, dict]]): - The request object. A request to create a named space. + The request object. A request to create a named space + with no members. space (:class:`google.apps.chat_v1.types.Space`): Required. The ``displayName`` and ``spaceType`` fields must be populated. Only ``SpaceType.SPACE`` is supported. - If you receive the error message ``ALREADY_EXISTS`` when - creating a space, try a different ``displayName``. An - existing space within the Google Workspace organization - might already use this display name. + If you receive the error message ``ALREADY_EXISTS``, try + a different ``displayName``. An existing space within + the Google Workspace organization might already use this + display name. + + If you're a member of the `Developer Preview + program `__, + ``SpaceType.GROUP_CHAT`` can be used if ``importMode`` + is set to true. The space ``name`` is assigned on the server so anything specified in this field will be ignored. 
@@ -1952,7 +1988,11 @@ async def sample_set_up_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.SetUpSpaceRequest( + space=space, ) # Make the request @@ -2042,7 +2082,11 @@ async def sample_update_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.UpdateSpaceRequest( + space=space, ) # Make the request @@ -2067,68 +2111,73 @@ async def sample_update_space(): Required. The updated field paths, comma separated if there are multiple. - Currently supported field paths: - - - ``display_name`` (Only supports changing the display - name of a space with the ``SPACE`` type, or when also - including the ``space_type`` mask to change a - ``GROUP_CHAT`` space type to ``SPACE``. Trying to - update the display name of a ``GROUP_CHAT`` or a - ``DIRECT_MESSAGE`` space results in an invalid - argument error. If you receive the error message - ``ALREADY_EXISTS`` when updating the ``displayName``, - try a different ``displayName``. An existing space - within the Google Workspace organization might - already use this display name.) - - - ``space_type`` (Only supports changing a - ``GROUP_CHAT`` space type to ``SPACE``. Include - ``display_name`` together with ``space_type`` in the - update mask and ensure that the specified space has a - non-empty display name and the ``SPACE`` space type. - Including the ``space_type`` mask and the ``SPACE`` - type in the specified space when updating the display - name is optional if the existing space already has - the ``SPACE`` type. Trying to update the space type - in other ways results in an invalid argument error). - ``space_type`` is not supported with admin access. 
- - - ``space_details`` - - - ``space_history_state`` (Supports `turning history on - or off for the - space `__ - if `the organization allows users to change their - history - setting `__. - Warning: mutually exclusive with all other field - paths.) ``space_history_state`` is not supported with - admin access. - - - ``access_settings.audience`` (Supports changing the - `access - setting `__ - of who can discover the space, join the space, and - preview the messages in space. If no audience is - specified in the access setting, the space's access - setting is updated to private. Warning: mutually - exclusive with all other field paths.) - ``access_settings.audience`` is not supported with - admin access. - - - Developer Preview: Supports changing the `permission - settings `__ - of a space, supported field paths include: - ``permission_settings.manage_members_and_groups``, - ``permission_settings.modify_space_details``, - ``permission_settings.toggle_history``, - ``permission_settings.use_at_mention_all``, - ``permission_settings.manage_apps``, - ``permission_settings.manage_webhooks``, - ``permission_settings.reply_messages`` (Warning: - mutually exclusive with all other non-permission - settings field paths). ``permission_settings`` is not - supported with admin access. + You can update the following fields for a space: + + ``space_details``: Updates the space's description. + Supports up to 150 characters. + + ``display_name``: Only supports updating the display + name for spaces where ``spaceType`` field is ``SPACE``. + If you receive the error message ``ALREADY_EXISTS``, try + a different value. An existing space within the Google + Workspace organization might already use this display + name. + + ``space_type``: Only supports changing a ``GROUP_CHAT`` + space type to ``SPACE``. Include ``display_name`` + together with ``space_type`` in the update mask and + ensure that the specified space has a non-empty display + name and the ``SPACE`` space type. 
Including the + ``space_type`` mask and the ``SPACE`` type in the + specified space when updating the display name is + optional if the existing space already has the ``SPACE`` + type. Trying to update the space type in other ways + results in an invalid argument error. ``space_type`` is + not supported with ``useAdminAccess``. + + ``space_history_state``: Updates `space history + settings `__ + by turning history on or off for the space. Only + supported if history settings are enabled for the Google + Workspace organization. To update the space history + state, you must omit all other field masks in your + request. ``space_history_state`` is not supported with + ``useAdminAccess``. + + ``access_settings.audience``: Updates the `access + setting `__ + of who can discover the space, join the space, and + preview the messages in named space where ``spaceType`` + field is ``SPACE``. If the existing space has a target + audience, you can remove the audience and restrict space + access by omitting a value for this field mask. To + update access settings for a space, the authenticating + user must be a space manager and omit all other field + masks in your request. You can't update this field if + the space is in `import + mode `__. + To learn more, see `Make a space discoverable to + specific + users `__. + ``access_settings.audience`` is not supported with + ``useAdminAccess``. + + ``permission_settings``: Supports changing the + `permission + settings `__ + of a space. When updating permission settings, you can + only specify ``permissionSettings`` field masks; you + cannot update other field masks at the same time. + ``permissionSettings`` is not supported with + ``useAdminAccess``. 
The supported field masks include: + + - ``permission_settings.manageMembersAndGroups`` + - ``permission_settings.modifySpaceDetails`` + - ``permission_settings.toggleHistory`` + - ``permission_settings.useAtMentionAll`` + - ``permission_settings.manageApps`` + - ``permission_settings.manageWebhooks`` + - ``permission_settings.replyMessages`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2495,46 +2544,25 @@ async def create_membership( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_membership.Membership: - r"""Creates a human membership or app membership for the calling - app. Creating memberships for other apps isn't supported. For an - example, see `Invite or add a user or a Google Chat app to a - space `__. - When creating a membership, if the specified member has their - auto-accept policy turned off, then they're invited, and must - accept the space invitation before joining. Otherwise, creating - a membership adds the member directly to the specified space. - Requires `user + r"""Creates a membership for the calling Chat app, a user, or a + Google Group. Creating memberships for other Chat apps isn't + supported. When creating a membership, if the specified member + has their auto-accept policy turned off, then they're invited, + and must accept the space invitation before joining. Otherwise, + creating a membership adds the member directly to the specified + space. Requires `user authentication `__. - To specify the member to add, set the ``membership.member.name`` - for the human or app member, or set the - ``membership.group_member.name`` for the group member. - - - To add the calling app to a space or a direct message between - two human users, use ``users/app``. Unable to add other apps - to the space. - - - To add a human user, use ``users/{user}``, where ``{user}`` - can be the email address for the user. 
For users in the same - Workspace organization ``{user}`` can also be the ``id`` for - the person from the People API, or the ``id`` for the user in - the Directory API. For example, if the People API Person - profile ID for ``user@example.com`` is ``123456789``, you can - add the user to the space by setting the - ``membership.member.name`` to ``users/user@example.com`` or - ``users/123456789``. - - - To add or invite a Google group in a named space, use - ``groups/{group}``, where ``{group}`` is the ``id`` for the - group from the Cloud Identity Groups API. For example, you - can use `Cloud Identity Groups lookup - API `__ - to retrieve the ID ``123456789`` for group email - ``group@example.com``, then you can add or invite the group - to a named space by setting the - ``membership.group_member.name`` to ``groups/123456789``. - Group email is not supported, and Google groups can only be - added as members in named spaces. + For example usage, see: + + - `Invite or add a user to a + space `__. + + - `Invite or add a Google Group to a + space `__. + + - `Add the Chat app to a + space `__. .. code-block:: python @@ -3694,6 +3722,9 @@ async def get_space_event( message was later updated, the server returns the updated ``Message`` resource in the event payload. + Note: The ``permissionSettings`` field is not returned in the + Space object of the Space event data for this request. + Requires `user authentication `__. 
To get an event, the authenticated user must be a member of the diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py index 0d542091414a..a65d4dcb0a5a 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py @@ -878,23 +878,40 @@ def create_message( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_message.Message: - r"""Creates a message in a Google Chat space. The maximum message - size, including text and cards, is 32,000 bytes. For an example, - see `Send a + r"""Creates a message in a Google Chat space. For an example, see + `Send a message `__. - Calling this method requires - `authentication `__ - and supports the following authentication types: + The ``create()`` method requires either user or app + authentication. Chat attributes the message sender differently + depending on the type of authentication that you use in your + request. - - For text messages, user authentication or app authentication - are supported. - - For card messages, only app authentication is supported. - (Only Chat apps can create card messages.) + The following image shows how Chat attributes a message when you + use app authentication. Chat displays the Chat app as the + message sender. The content of the message can contain text + (``text``), cards (``cardsV2``), and accessory widgets + (``accessoryWidgets``). + + |Message sent with app authentication client| + + The following image shows how Chat attributes a message when you + use user authentication. Chat displays the user as the message + sender and attributes the Chat app to the message by displaying + its name. The content of message can only contain text + (``text``). 
+ + |Message sent with user authentication client| + + The maximum message size, including the message contents, is + 32,000 bytes. + + .. |Message sent with app authentication client| image:: https://developers.google.com/workspace/chat/images/message-app-auth.svg + .. |Message sent with user authentication client| image:: https://developers.google.com/workspace/chat/images/message-user-auth.svg .. code-block:: python - # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: # - It may require correct/in-range values for request initialization. @@ -1025,9 +1042,12 @@ def list_messages( metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMessagesPager: r"""Lists messages in a space that the caller is a member of, - including messages from blocked members and spaces. For an - example, see `List - messages `__. Requires `user + including messages from blocked members and spaces. If you list + messages from a space with no messages, the response is an empty + object. When using a REST/HTTP interface, the response contains + an empty JSON object, ``{}``. For an example, see `List + messages `__. + Requires `user authentication `__. .. code-block:: python @@ -2290,8 +2310,9 @@ def create_space( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_space.Space: - r"""Creates a named space. Spaces grouped by topics aren't - supported. For an example, see `Create a + r"""Creates a space with no members. Can be used to create a named + space. Spaces grouped by topics aren't supported. For an + example, see `Create a space `__. If you receive the error message ``ALREADY_EXISTS`` when @@ -2299,6 +2320,11 @@ def create_space( space within the Google Workspace organization might already use this display name. 
+ If you're a member of the `Developer Preview + program `__, + you can create a group chat in import mode using + ``spaceType.GROUP_CHAT``. + Requires `user authentication `__. @@ -2318,7 +2344,11 @@ def sample_create_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.CreateSpaceRequest( + space=space, ) # Make the request @@ -2329,16 +2359,22 @@ def sample_create_space(): Args: request (Union[google.apps.chat_v1.types.CreateSpaceRequest, dict]): - The request object. A request to create a named space. + The request object. A request to create a named space + with no members. space (google.apps.chat_v1.types.Space): Required. The ``displayName`` and ``spaceType`` fields must be populated. Only ``SpaceType.SPACE`` is supported. - If you receive the error message ``ALREADY_EXISTS`` when - creating a space, try a different ``displayName``. An - existing space within the Google Workspace organization - might already use this display name. + If you receive the error message ``ALREADY_EXISTS``, try + a different ``displayName``. An existing space within + the Google Workspace organization might already use this + display name. + + If you're a member of the `Developer Preview + program `__, + ``SpaceType.GROUP_CHAT`` can be used if ``importMode`` + is set to true. The space ``name`` is assigned on the server so anything specified in this field will be ignored. 
@@ -2480,7 +2516,11 @@ def sample_set_up_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.SetUpSpaceRequest( + space=space, ) # Make the request @@ -2568,7 +2608,11 @@ def sample_update_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.UpdateSpaceRequest( + space=space, ) # Make the request @@ -2593,68 +2637,73 @@ def sample_update_space(): Required. The updated field paths, comma separated if there are multiple. - Currently supported field paths: - - - ``display_name`` (Only supports changing the display - name of a space with the ``SPACE`` type, or when also - including the ``space_type`` mask to change a - ``GROUP_CHAT`` space type to ``SPACE``. Trying to - update the display name of a ``GROUP_CHAT`` or a - ``DIRECT_MESSAGE`` space results in an invalid - argument error. If you receive the error message - ``ALREADY_EXISTS`` when updating the ``displayName``, - try a different ``displayName``. An existing space - within the Google Workspace organization might - already use this display name.) - - - ``space_type`` (Only supports changing a - ``GROUP_CHAT`` space type to ``SPACE``. Include - ``display_name`` together with ``space_type`` in the - update mask and ensure that the specified space has a - non-empty display name and the ``SPACE`` space type. - Including the ``space_type`` mask and the ``SPACE`` - type in the specified space when updating the display - name is optional if the existing space already has - the ``SPACE`` type. Trying to update the space type - in other ways results in an invalid argument error). - ``space_type`` is not supported with admin access. 
- - - ``space_details`` - - - ``space_history_state`` (Supports `turning history on - or off for the - space `__ - if `the organization allows users to change their - history - setting `__. - Warning: mutually exclusive with all other field - paths.) ``space_history_state`` is not supported with - admin access. - - - ``access_settings.audience`` (Supports changing the - `access - setting `__ - of who can discover the space, join the space, and - preview the messages in space. If no audience is - specified in the access setting, the space's access - setting is updated to private. Warning: mutually - exclusive with all other field paths.) - ``access_settings.audience`` is not supported with - admin access. - - - Developer Preview: Supports changing the `permission - settings `__ - of a space, supported field paths include: - ``permission_settings.manage_members_and_groups``, - ``permission_settings.modify_space_details``, - ``permission_settings.toggle_history``, - ``permission_settings.use_at_mention_all``, - ``permission_settings.manage_apps``, - ``permission_settings.manage_webhooks``, - ``permission_settings.reply_messages`` (Warning: - mutually exclusive with all other non-permission - settings field paths). ``permission_settings`` is not - supported with admin access. + You can update the following fields for a space: + + ``space_details``: Updates the space's description. + Supports up to 150 characters. + + ``display_name``: Only supports updating the display + name for spaces where ``spaceType`` field is ``SPACE``. + If you receive the error message ``ALREADY_EXISTS``, try + a different value. An existing space within the Google + Workspace organization might already use this display + name. + + ``space_type``: Only supports changing a ``GROUP_CHAT`` + space type to ``SPACE``. Include ``display_name`` + together with ``space_type`` in the update mask and + ensure that the specified space has a non-empty display + name and the ``SPACE`` space type. 
Including the + ``space_type`` mask and the ``SPACE`` type in the + specified space when updating the display name is + optional if the existing space already has the ``SPACE`` + type. Trying to update the space type in other ways + results in an invalid argument error. ``space_type`` is + not supported with ``useAdminAccess``. + + ``space_history_state``: Updates `space history + settings `__ + by turning history on or off for the space. Only + supported if history settings are enabled for the Google + Workspace organization. To update the space history + state, you must omit all other field masks in your + request. ``space_history_state`` is not supported with + ``useAdminAccess``. + + ``access_settings.audience``: Updates the `access + setting `__ + of who can discover the space, join the space, and + preview the messages in named space where ``spaceType`` + field is ``SPACE``. If the existing space has a target + audience, you can remove the audience and restrict space + access by omitting a value for this field mask. To + update access settings for a space, the authenticating + user must be a space manager and omit all other field + masks in your request. You can't update this field if + the space is in `import + mode `__. + To learn more, see `Make a space discoverable to + specific + users `__. + ``access_settings.audience`` is not supported with + ``useAdminAccess``. + + ``permission_settings``: Supports changing the + `permission + settings `__ + of a space. When updating permission settings, you can + only specify ``permissionSettings`` field masks; you + cannot update other field masks at the same time. + ``permissionSettings`` is not supported with + ``useAdminAccess``. 
The supported field masks include: + + - ``permission_settings.manageMembersAndGroups`` + - ``permission_settings.modifySpaceDetails`` + - ``permission_settings.toggleHistory`` + - ``permission_settings.useAtMentionAll`` + - ``permission_settings.manageApps`` + - ``permission_settings.manageWebhooks`` + - ``permission_settings.replyMessages`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -3011,46 +3060,25 @@ def create_membership( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_membership.Membership: - r"""Creates a human membership or app membership for the calling - app. Creating memberships for other apps isn't supported. For an - example, see `Invite or add a user or a Google Chat app to a - space `__. - When creating a membership, if the specified member has their - auto-accept policy turned off, then they're invited, and must - accept the space invitation before joining. Otherwise, creating - a membership adds the member directly to the specified space. - Requires `user + r"""Creates a membership for the calling Chat app, a user, or a + Google Group. Creating memberships for other Chat apps isn't + supported. When creating a membership, if the specified member + has their auto-accept policy turned off, then they're invited, + and must accept the space invitation before joining. Otherwise, + creating a membership adds the member directly to the specified + space. Requires `user authentication `__. - To specify the member to add, set the ``membership.member.name`` - for the human or app member, or set the - ``membership.group_member.name`` for the group member. - - - To add the calling app to a space or a direct message between - two human users, use ``users/app``. Unable to add other apps - to the space. - - - To add a human user, use ``users/{user}``, where ``{user}`` - can be the email address for the user. 
For users in the same - Workspace organization ``{user}`` can also be the ``id`` for - the person from the People API, or the ``id`` for the user in - the Directory API. For example, if the People API Person - profile ID for ``user@example.com`` is ``123456789``, you can - add the user to the space by setting the - ``membership.member.name`` to ``users/user@example.com`` or - ``users/123456789``. - - - To add or invite a Google group in a named space, use - ``groups/{group}``, where ``{group}`` is the ``id`` for the - group from the Cloud Identity Groups API. For example, you - can use `Cloud Identity Groups lookup - API `__ - to retrieve the ID ``123456789`` for group email - ``group@example.com``, then you can add or invite the group - to a named space by setting the - ``membership.group_member.name`` to ``groups/123456789``. - Group email is not supported, and Google groups can only be - added as members in named spaces. + For example usage, see: + + - `Invite or add a user to a + space `__. + + - `Invite or add a Google Group to a + space `__. + + - `Add the Chat app to a + space `__. .. code-block:: python @@ -4183,6 +4211,9 @@ def get_space_event( message was later updated, the server returns the updated ``Message`` resource in the event payload. + Note: The ``permissionSettings`` field is not returned in the + Space object of the Space event data for this request. + Requires `user authentication `__. 
To get an event, the authenticated user must be a member of the diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py index bfb0492b21e8..d02bc6784c2a 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py @@ -254,19 +254,36 @@ def create_message( ) -> Callable[[gc_message.CreateMessageRequest], gc_message.Message]: r"""Return a callable for the create message method over gRPC. - Creates a message in a Google Chat space. The maximum message - size, including text and cards, is 32,000 bytes. For an example, - see `Send a + Creates a message in a Google Chat space. For an example, see + `Send a message `__. - Calling this method requires - `authentication `__ - and supports the following authentication types: + The ``create()`` method requires either user or app + authentication. Chat attributes the message sender differently + depending on the type of authentication that you use in your + request. - - For text messages, user authentication or app authentication - are supported. - - For card messages, only app authentication is supported. - (Only Chat apps can create card messages.) + The following image shows how Chat attributes a message when you + use app authentication. Chat displays the Chat app as the + message sender. The content of the message can contain text + (``text``), cards (``cardsV2``), and accessory widgets + (``accessoryWidgets``). + + |Message sent with app authentication gRPC| + + The following image shows how Chat attributes a message when you + use user authentication. Chat displays the user as the message + sender and attributes the Chat app to the message by displaying + its name. The content of message can only contain text + (``text``). 
+ + |Message sent with user authentication gRPC| + + The maximum message size, including the message contents, is + 32,000 bytes. + + .. |Message sent with app authentication gRPC| image:: https://developers.google.com/workspace/chat/images/message-app-auth.svg + .. |Message sent with user authentication gRPC| image:: https://developers.google.com/workspace/chat/images/message-user-auth.svg Returns: Callable[[~.CreateMessageRequest], @@ -293,9 +310,12 @@ def list_messages( r"""Return a callable for the list messages method over gRPC. Lists messages in a space that the caller is a member of, - including messages from blocked members and spaces. For an - example, see `List - messages `__. Requires `user + including messages from blocked members and spaces. If you list + messages from a space with no messages, the response is an empty + object. When using a REST/HTTP interface, the response contains + an empty JSON object, ``{}``. For an example, see `List + messages `__. + Requires `user authentication `__. Returns: @@ -686,8 +706,9 @@ def get_space(self) -> Callable[[space.GetSpaceRequest], space.Space]: def create_space(self) -> Callable[[gc_space.CreateSpaceRequest], gc_space.Space]: r"""Return a callable for the create space method over gRPC. - Creates a named space. Spaces grouped by topics aren't - supported. For an example, see `Create a + Creates a space with no members. Can be used to create a named + space. Spaces grouped by topics aren't supported. For an + example, see `Create a space `__. If you receive the error message ``ALREADY_EXISTS`` when @@ -695,6 +716,11 @@ def create_space(self) -> Callable[[gc_space.CreateSpaceRequest], gc_space.Space space within the Google Workspace organization might already use this display name. + If you're a member of the `Developer Preview + program `__, + you can create a group chat in import mode using + ``spaceType.GROUP_CHAT``. + Requires `user authentication `__. 
@@ -944,46 +970,25 @@ def create_membership( ) -> Callable[[gc_membership.CreateMembershipRequest], gc_membership.Membership]: r"""Return a callable for the create membership method over gRPC. - Creates a human membership or app membership for the calling - app. Creating memberships for other apps isn't supported. For an - example, see `Invite or add a user or a Google Chat app to a - space `__. - When creating a membership, if the specified member has their - auto-accept policy turned off, then they're invited, and must - accept the space invitation before joining. Otherwise, creating - a membership adds the member directly to the specified space. - Requires `user + Creates a membership for the calling Chat app, a user, or a + Google Group. Creating memberships for other Chat apps isn't + supported. When creating a membership, if the specified member + has their auto-accept policy turned off, then they're invited, + and must accept the space invitation before joining. Otherwise, + creating a membership adds the member directly to the specified + space. Requires `user authentication `__. - To specify the member to add, set the ``membership.member.name`` - for the human or app member, or set the - ``membership.group_member.name`` for the group member. - - - To add the calling app to a space or a direct message between - two human users, use ``users/app``. Unable to add other apps - to the space. - - - To add a human user, use ``users/{user}``, where ``{user}`` - can be the email address for the user. For users in the same - Workspace organization ``{user}`` can also be the ``id`` for - the person from the People API, or the ``id`` for the user in - the Directory API. For example, if the People API Person - profile ID for ``user@example.com`` is ``123456789``, you can - add the user to the space by setting the - ``membership.member.name`` to ``users/user@example.com`` or - ``users/123456789``. 
- - - To add or invite a Google group in a named space, use - ``groups/{group}``, where ``{group}`` is the ``id`` for the - group from the Cloud Identity Groups API. For example, you - can use `Cloud Identity Groups lookup - API `__ - to retrieve the ID ``123456789`` for group email - ``group@example.com``, then you can add or invite the group - to a named space by setting the - ``membership.group_member.name`` to ``groups/123456789``. - Group email is not supported, and Google groups can only be - added as members in named spaces. + For example usage, see: + + - `Invite or add a user to a + space `__. + + - `Invite or add a Google Group to a + space `__. + + - `Add the Chat app to a + space `__. Returns: Callable[[~.CreateMembershipRequest], @@ -1271,6 +1276,9 @@ def get_space_event( message was later updated, the server returns the updated ``Message`` resource in the event payload. + Note: The ``permissionSettings`` field is not returned in the + Space object of the Space event data for this request. + Requires `user authentication `__. To get an event, the authenticated user must be a member of the diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py index a404fca34305..86137f66eff8 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py @@ -258,19 +258,36 @@ def create_message( ) -> Callable[[gc_message.CreateMessageRequest], Awaitable[gc_message.Message]]: r"""Return a callable for the create message method over gRPC. - Creates a message in a Google Chat space. The maximum message - size, including text and cards, is 32,000 bytes. For an example, - see `Send a + Creates a message in a Google Chat space. For an example, see + `Send a message `__. 
- Calling this method requires - `authentication `__ - and supports the following authentication types: + The ``create()`` method requires either user or app + authentication. Chat attributes the message sender differently + depending on the type of authentication that you use in your + request. - - For text messages, user authentication or app authentication - are supported. - - For card messages, only app authentication is supported. - (Only Chat apps can create card messages.) + The following image shows how Chat attributes a message when you + use app authentication. Chat displays the Chat app as the + message sender. The content of the message can contain text + (``text``), cards (``cardsV2``), and accessory widgets + (``accessoryWidgets``). + + |Message sent with app authentication async gRPC| + + The following image shows how Chat attributes a message when you + use user authentication. Chat displays the user as the message + sender and attributes the Chat app to the message by displaying + its name. The content of message can only contain text + (``text``). + + |Message sent with user authentication async gRPC| + + The maximum message size, including the message contents, is + 32,000 bytes. + + .. |Message sent with app authentication async gRPC| image:: https://developers.google.com/workspace/chat/images/message-app-auth.svg + .. |Message sent with user authentication async gRPC| image:: https://developers.google.com/workspace/chat/images/message-user-auth.svg Returns: Callable[[~.CreateMessageRequest], @@ -299,9 +316,12 @@ def list_messages( r"""Return a callable for the list messages method over gRPC. Lists messages in a space that the caller is a member of, - including messages from blocked members and spaces. For an - example, see `List - messages `__. Requires `user + including messages from blocked members and spaces. If you list + messages from a space with no messages, the response is an empty + object. 
When using a REST/HTTP interface, the response contains + an empty JSON object, ``{}``. For an example, see `List + messages `__. + Requires `user authentication `__. Returns: @@ -698,8 +718,9 @@ def create_space( ) -> Callable[[gc_space.CreateSpaceRequest], Awaitable[gc_space.Space]]: r"""Return a callable for the create space method over gRPC. - Creates a named space. Spaces grouped by topics aren't - supported. For an example, see `Create a + Creates a space with no members. Can be used to create a named + space. Spaces grouped by topics aren't supported. For an + example, see `Create a space `__. If you receive the error message ``ALREADY_EXISTS`` when @@ -707,6 +728,11 @@ def create_space( space within the Google Workspace organization might already use this display name. + If you're a member of the `Developer Preview + program `__, + you can create a group chat in import mode using + ``spaceType.GROUP_CHAT``. + Requires `user authentication `__. @@ -964,46 +990,25 @@ def create_membership( ]: r"""Return a callable for the create membership method over gRPC. - Creates a human membership or app membership for the calling - app. Creating memberships for other apps isn't supported. For an - example, see `Invite or add a user or a Google Chat app to a - space `__. - When creating a membership, if the specified member has their - auto-accept policy turned off, then they're invited, and must - accept the space invitation before joining. Otherwise, creating - a membership adds the member directly to the specified space. - Requires `user + Creates a membership for the calling Chat app, a user, or a + Google Group. Creating memberships for other Chat apps isn't + supported. When creating a membership, if the specified member + has their auto-accept policy turned off, then they're invited, + and must accept the space invitation before joining. Otherwise, + creating a membership adds the member directly to the specified + space. Requires `user authentication `__. 
- To specify the member to add, set the ``membership.member.name`` - for the human or app member, or set the - ``membership.group_member.name`` for the group member. - - - To add the calling app to a space or a direct message between - two human users, use ``users/app``. Unable to add other apps - to the space. - - - To add a human user, use ``users/{user}``, where ``{user}`` - can be the email address for the user. For users in the same - Workspace organization ``{user}`` can also be the ``id`` for - the person from the People API, or the ``id`` for the user in - the Directory API. For example, if the People API Person - profile ID for ``user@example.com`` is ``123456789``, you can - add the user to the space by setting the - ``membership.member.name`` to ``users/user@example.com`` or - ``users/123456789``. - - - To add or invite a Google group in a named space, use - ``groups/{group}``, where ``{group}`` is the ``id`` for the - group from the Cloud Identity Groups API. For example, you - can use `Cloud Identity Groups lookup - API `__ - to retrieve the ID ``123456789`` for group email - ``group@example.com``, then you can add or invite the group - to a named space by setting the - ``membership.group_member.name`` to ``groups/123456789``. - Group email is not supported, and Google groups can only be - added as members in named spaces. + For example usage, see: + + - `Invite or add a user to a + space `__. + + - `Invite or add a Google Group to a + space `__. + + - `Add the Chat app to a + space `__. Returns: Callable[[~.CreateMembershipRequest], @@ -1301,6 +1306,9 @@ def get_space_event( message was later updated, the server returns the updated ``Message`` resource in the event payload. + Note: The ``permissionSettings`` field is not returned in the + Space object of the Space event data for this request. + Requires `user authentication `__. 
To get an event, the authenticated user must be a member of the diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py index f9c4a5cd53a3..d9717f7c33ea 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py @@ -1409,7 +1409,8 @@ def __call__( Args: request (~.gc_space.CreateSpaceRequest): - The request object. A request to create a named space. + The request object. A request to create a named space + with no members. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/message.py b/packages/google-apps-chat/google/apps/chat_v1/types/message.py index 90dda263a1ec..56d10d7b3574 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/message.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/message.py @@ -109,8 +109,8 @@ class Message(proto.Message): user `__, or everyone in the space. - To learn about creating text messages, see `Send a text - message `__. + To learn about creating text messages, see `Send a + message `__. formatted_text (str): Output only. Contains the message ``text`` with markups added to communicate formatting. This field might not @@ -154,8 +154,9 @@ class Message(proto.Message): user `__, the messages can't contain cards. - To learn about cards and how to create them, see `Send card - messages `__. + To learn how to create a message that contains cards, see + `Send a + message `__. `Card builder `__ @@ -213,17 +214,17 @@ class Message(proto.Message): Immutable. Input for creating a message, otherwise output only. The user that can view the message. 
When set, the message is private and only visible to the specified user - and the Chat app. Link previews and attachments aren't - supported for private messages. + and the Chat app. To include this field in your request, you + must call the Chat API using `app + authentication `__ + and omit the following: - Only Chat apps can send private messages. If your Chat app - `authenticates as a - user `__ - to send a message, the message can't be private and must - omit this field. + - `Attachments `__ + - `Accessory + widgets `__ - For details, see `Send private messages to Google Chat - users `__. + For details, see `Send a message + privately `__. deletion_metadata (google.apps.chat_v1.types.DeletionMetadata): Output only. Information about a deleted message. A message is deleted when ``delete_time`` is set. @@ -428,7 +429,7 @@ class Thread(proto.Message): Attributes: name (str): - Output only. Resource name of the thread. + Resource name of the thread. Example: ``spaces/{space}/threads/{thread}`` thread_key (str): diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/space.py b/packages/google-apps-chat/google/apps/chat_v1/types/space.py index 75456c5e5e13..694375fc0ebd 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/space.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/space.py @@ -46,11 +46,26 @@ class Space(proto.Message): r"""A space in Google Chat. Spaces are conversations between two or more users or 1:1 messages between a user and a Chat app. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Resource name of the space. Format: ``spaces/{space}`` + + Where ``{space}`` represents the system-assigned ID for the + space. 
You can obtain the space ID by calling the + ```spaces.list()`` `__ + method or from the space URL. For example, if the space URL + is + ``https://mail.google.com/mail/u/0/#chat/space/AAAAAAAAA``, + the space ID is ``AAAAAAAAA``. type_ (google.apps.chat_v1.types.Space.Type): Output only. Deprecated: Use ``space_type`` instead. The type of a space. @@ -66,12 +81,12 @@ class Space(proto.Message): instead. Whether messages are threaded in this space. display_name (str): The space's display name. Required when `creating a - space `__. - If you receive the error message ``ALREADY_EXISTS`` when - creating a space or updating the ``displayName``, try a - different ``displayName``. An existing space within the - Google Workspace organization might already use this display - name. + space `__ + with a ``spaceType`` of ``SPACE``. If you receive the error + message ``ALREADY_EXISTS`` when creating a space or updating + the ``displayName``, try a different ``displayName``. An + existing space within the Google Workspace organization + might already use this display name. For direct messages, this field might be empty. @@ -86,15 +101,6 @@ class Space(proto.Message): user account). By default, a space created by a consumer account permits any Google Chat user. - - The space is used to [import data to Google Chat] - (https://developers.google.com/chat/api/guides/import-data-overview) - because import mode spaces must only permit members from - the same Google Workspace organization. However, as part - of the `Google Workspace Developer Preview - Program `__, - import mode spaces can permit any Google Chat user so - this field can then be set for import mode spaces. - For existing spaces, this field is output only. space_threading_state (google.apps.chat_v1.types.Space.SpaceThreadingState): Output only. The threading state in the Chat @@ -146,6 +152,21 @@ class Space(proto.Message): space_uri (str): Output only. The URI for a user to access the space. 
+ predefined_permission_settings (google.apps.chat_v1.types.Space.PredefinedPermissionSettings): + Optional. Input only. Predefined space permission settings, + input only when creating a space. If the field is not set, a + collaboration space is created. After you create the space, + settings are populated in the ``PermissionSettings`` field. + + This field is a member of `oneof`_ ``space_permission_settings``. + permission_settings (google.apps.chat_v1.types.Space.PermissionSettings): + Optional. Space permission settings for + existing spaces. Input for updating exact space + permission settings, where existing permission + settings are replaced. Output lists current + permission settings. + + This field is a member of `oneof`_ ``space_permission_settings``. """ class Type(proto.Enum): @@ -212,6 +233,27 @@ class SpaceThreadingState(proto.Enum): GROUPED_MESSAGES = 3 UNTHREADED_MESSAGES = 4 + class PredefinedPermissionSettings(proto.Enum): + r"""Predefined permission settings that you can only specify when + creating a named space. More settings might be added in the future. + For details about permission settings for named spaces, see `Learn + about spaces `__. + + Values: + PREDEFINED_PERMISSION_SETTINGS_UNSPECIFIED (0): + Unspecified. Don't use. + COLLABORATION_SPACE (1): + Setting to make the space a collaboration + space where all members can post messages. + ANNOUNCEMENT_SPACE (2): + Setting to make the space an announcement + space where only space managers can post + messages. + """ + PREDEFINED_PERMISSION_SETTINGS_UNSPECIFIED = 0 + COLLABORATION_SPACE = 1 + ANNOUNCEMENT_SPACE = 2 + class SpaceDetails(proto.Message): r"""Details about the space including description and rules. @@ -273,14 +315,20 @@ class AccessSettings(proto.Message): Optional. The resource name of the `target audience `__ who can discover the space, join the space, and preview the - messages in the space. For details, see `Make a space - discoverable to a target + messages in the space. 
If unset, only users or Google Groups + who have been individually invited or added to the space can + access it. For details, see `Make a space discoverable to a + target audience `__. Format: ``audiences/{audience}`` To use the default target audience for the Google Workspace organization, set to ``audiences/default``. + + This field is not populated when using the ``chat.bot`` + scope with `app + authentication `__. """ class AccessState(proto.Enum): @@ -291,12 +339,17 @@ class AccessState(proto.Enum): Access state is unknown or not supported in this API. PRIVATE (1): - Space is discoverable by added or invited - members or groups. + Only users or Google Groups that have been + individually added or invited by other users or + Google Workspace administrators can discover and + access the space. DISCOVERABLE (2): - Space is discoverable by the selected `target - audience `__, - as well as added or invited members or groups. + A space manager has granted a target audience access to the + space. Users or Google Groups that have been individually + added or invited to the space can also discover and access + the space. To learn more, see `Make a space discoverable to + specific + users `__. """ ACCESS_STATE_UNSPECIFIED = 0 PRIVATE = 1 @@ -312,6 +365,125 @@ class AccessState(proto.Enum): number=3, ) + class PermissionSettings(proto.Message): + r"""`Permission + settings `__ that + you can specify when updating an existing named space. + + To set permission settings when creating a space, specify the + ``PredefinedPermissionSettings`` field in your request. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + manage_members_and_groups (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for managing members and groups in a + space. + + This field is a member of `oneof`_ ``_manage_members_and_groups``. 
+ modify_space_details (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for updating space name, avatar, + description and guidelines. + + This field is a member of `oneof`_ ``_modify_space_details``. + toggle_history (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for toggling space history on and + off. + + This field is a member of `oneof`_ ``_toggle_history``. + use_at_mention_all (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for using @all in a space. + + This field is a member of `oneof`_ ``_use_at_mention_all``. + manage_apps (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for managing apps in a space. + + This field is a member of `oneof`_ ``_manage_apps``. + manage_webhooks (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for managing webhooks in a space. + + This field is a member of `oneof`_ ``_manage_webhooks``. + post_messages (google.apps.chat_v1.types.Space.PermissionSetting): + Output only. Setting for posting messages in + a space. + + This field is a member of `oneof`_ ``_post_messages``. + reply_messages (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for replying to messages in a space. + + This field is a member of `oneof`_ ``_reply_messages``. 
+ """ + + manage_members_and_groups: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message="Space.PermissionSetting", + ) + modify_space_details: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message="Space.PermissionSetting", + ) + toggle_history: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message="Space.PermissionSetting", + ) + use_at_mention_all: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="Space.PermissionSetting", + ) + manage_apps: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=5, + optional=True, + message="Space.PermissionSetting", + ) + manage_webhooks: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message="Space.PermissionSetting", + ) + post_messages: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=7, + optional=True, + message="Space.PermissionSetting", + ) + reply_messages: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=8, + optional=True, + message="Space.PermissionSetting", + ) + + class PermissionSetting(proto.Message): + r"""Represents a space permission setting. + + Attributes: + managers_allowed (bool): + Whether spaces managers have this permission. + members_allowed (bool): + Whether non-manager members have this + permission. 
+ """ + + managers_allowed: bool = proto.Field( + proto.BOOL, + number=1, + ) + members_allowed: bool = proto.Field( + proto.BOOL, + number=2, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -389,20 +561,37 @@ class AccessState(proto.Enum): proto.STRING, number=25, ) + predefined_permission_settings: PredefinedPermissionSettings = proto.Field( + proto.ENUM, + number=26, + oneof="space_permission_settings", + enum=PredefinedPermissionSettings, + ) + permission_settings: PermissionSettings = proto.Field( + proto.MESSAGE, + number=27, + oneof="space_permission_settings", + message=PermissionSettings, + ) class CreateSpaceRequest(proto.Message): - r"""A request to create a named space. + r"""A request to create a named space with no members. Attributes: space (google.apps.chat_v1.types.Space): Required. The ``displayName`` and ``spaceType`` fields must be populated. Only ``SpaceType.SPACE`` is supported. - If you receive the error message ``ALREADY_EXISTS`` when - creating a space, try a different ``displayName``. An - existing space within the Google Workspace organization - might already use this display name. + If you receive the error message ``ALREADY_EXISTS``, try a + different ``displayName``. An existing space within the + Google Workspace organization might already use this display + name. + + If you're a member of the `Developer Preview + program `__, + ``SpaceType.GROUP_CHAT`` can be used if ``importMode`` is + set to true. The space ``name`` is assigned on the server so anything specified in this field will be ignored. @@ -491,8 +680,9 @@ class ListSpacesResponse(proto.Message): Attributes: spaces (MutableSequence[google.apps.chat_v1.types.Space]): - List of spaces in the requested (or first) - page. + List of spaces in the requested (or first) page. Note: The + ``permissionSettings`` field is not returned in the Space + object for list requests. next_page_token (str): You can send a token as ``pageToken`` to retrieve the next page of results. 
If empty, there are no subsequent pages. @@ -587,67 +777,69 @@ class UpdateSpaceRequest(proto.Message): Required. The updated field paths, comma separated if there are multiple. - Currently supported field paths: - - - ``display_name`` (Only supports changing the display name - of a space with the ``SPACE`` type, or when also - including the ``space_type`` mask to change a - ``GROUP_CHAT`` space type to ``SPACE``. Trying to update - the display name of a ``GROUP_CHAT`` or a - ``DIRECT_MESSAGE`` space results in an invalid argument - error. If you receive the error message - ``ALREADY_EXISTS`` when updating the ``displayName``, try - a different ``displayName``. An existing space within the - Google Workspace organization might already use this - display name.) - - - ``space_type`` (Only supports changing a ``GROUP_CHAT`` - space type to ``SPACE``. Include ``display_name`` - together with ``space_type`` in the update mask and - ensure that the specified space has a non-empty display - name and the ``SPACE`` space type. Including the - ``space_type`` mask and the ``SPACE`` type in the - specified space when updating the display name is - optional if the existing space already has the ``SPACE`` - type. Trying to update the space type in other ways - results in an invalid argument error). ``space_type`` is - not supported with admin access. - - - ``space_details`` - - - ``space_history_state`` (Supports `turning history on or - off for the - space `__ - if `the organization allows users to change their history - setting `__. - Warning: mutually exclusive with all other field paths.) - ``space_history_state`` is not supported with admin - access. - - - ``access_settings.audience`` (Supports changing the - `access - setting `__ - of who can discover the space, join the space, and - preview the messages in space. If no audience is - specified in the access setting, the space's access - setting is updated to private. 
Warning: mutually - exclusive with all other field paths.) - ``access_settings.audience`` is not supported with admin - access. - - - Developer Preview: Supports changing the `permission - settings `__ - of a space, supported field paths include: - ``permission_settings.manage_members_and_groups``, - ``permission_settings.modify_space_details``, - ``permission_settings.toggle_history``, - ``permission_settings.use_at_mention_all``, - ``permission_settings.manage_apps``, - ``permission_settings.manage_webhooks``, - ``permission_settings.reply_messages`` (Warning: mutually - exclusive with all other non-permission settings field - paths). ``permission_settings`` is not supported with - admin access. + You can update the following fields for a space: + + ``space_details``: Updates the space's description. Supports + up to 150 characters. + + ``display_name``: Only supports updating the display name + for spaces where ``spaceType`` field is ``SPACE``. If you + receive the error message ``ALREADY_EXISTS``, try a + different value. An existing space within the Google + Workspace organization might already use this display name. + + ``space_type``: Only supports changing a ``GROUP_CHAT`` + space type to ``SPACE``. Include ``display_name`` together + with ``space_type`` in the update mask and ensure that the + specified space has a non-empty display name and the + ``SPACE`` space type. Including the ``space_type`` mask and + the ``SPACE`` type in the specified space when updating the + display name is optional if the existing space already has + the ``SPACE`` type. Trying to update the space type in other + ways results in an invalid argument error. ``space_type`` is + not supported with ``useAdminAccess``. + + ``space_history_state``: Updates `space history + settings `__ + by turning history on or off for the space. Only supported + if history settings are enabled for the Google Workspace + organization. 
To update the space history state, you must + omit all other field masks in your request. + ``space_history_state`` is not supported with + ``useAdminAccess``. + + ``access_settings.audience``: Updates the `access + setting `__ + of who can discover the space, join the space, and preview + the messages in named space where ``spaceType`` field is + ``SPACE``. If the existing space has a target audience, you + can remove the audience and restrict space access by + omitting a value for this field mask. To update access + settings for a space, the authenticating user must be a + space manager and omit all other field masks in your + request. You can't update this field if the space is in + `import + mode `__. + To learn more, see `Make a space discoverable to specific + users `__. + ``access_settings.audience`` is not supported with + ``useAdminAccess``. + + ``permission_settings``: Supports changing the `permission + settings `__ + of a space. When updating permission settings, you can only + specify ``permissionSettings`` field masks; you cannot + update other field masks at the same time. + ``permissionSettings`` is not supported with + ``useAdminAccess``. The supported field masks include: + + - ``permission_settings.manageMembersAndGroups`` + - ``permission_settings.modifySpaceDetails`` + - ``permission_settings.toggleHistory`` + - ``permission_settings.useAtMentionAll`` + - ``permission_settings.manageApps`` + - ``permission_settings.manageWebhooks`` + - ``permission_settings.replyMessages`` use_admin_access (bool): When ``true``, the method runs using the user's Google Workspace administrator privileges. 
diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/space_event.py b/packages/google-apps-chat/google/apps/chat_v1/types/space_event.py index 96591e800225..17bf07ba27d7 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/space_event.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/space_event.py @@ -492,8 +492,9 @@ class ListSpaceEventsResponse(proto.Message): Attributes: space_events (MutableSequence[google.apps.chat_v1.types.SpaceEvent]): - Results are returned in chronological order - (oldest event first). + Results are returned in chronological order (oldest event + first). Note: The ``permissionSettings`` field is not + returned in the Space object for list requests. next_page_token (str): Continuation token used to fetch more events. If this field is omitted, there are no diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_async.py index 047b4031cee0..b72fb46759d1 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_async.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_async.py @@ -39,7 +39,11 @@ async def sample_create_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.CreateSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_sync.py index 845ce548ddc0..083a0684ae1e 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_sync.py +++ 
b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_sync.py @@ -39,7 +39,11 @@ def sample_create_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.CreateSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_async.py index 6870b8744a05..8e2b99a025ad 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_async.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_async.py @@ -39,7 +39,11 @@ async def sample_set_up_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.SetUpSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_sync.py index 4d27131a243c..61ad37d3d045 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_sync.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_sync.py @@ -39,7 +39,11 @@ def sample_set_up_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.SetUpSpaceRequest( + space=space, ) # Make the request diff --git 
a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_async.py index 14e0944aaaeb..2c2a593b22a2 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_async.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_async.py @@ -39,7 +39,11 @@ async def sample_update_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.UpdateSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_sync.py index a5158f7a9e07..362d50feb376 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_sync.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_sync.py @@ -39,7 +39,11 @@ def sample_update_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.UpdateSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json index f0d8a1017646..8f568256e424 100644 --- a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json +++ b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", 
"name": "google-apps-chat", - "version": "0.1.11" + "version": "0.1.0" }, "snippets": [ { @@ -728,12 +728,12 @@ "regionTag": "chat_v1_generated_ChatService_CreateSpace_async", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -743,18 +743,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -808,12 +808,12 @@ "regionTag": "chat_v1_generated_ChatService_CreateSpace_sync", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -823,18 +823,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -3749,12 +3749,12 @@ "regionTag": "chat_v1_generated_ChatService_SetUpSpace_async", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -3764,18 +3764,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -3825,12 +3825,12 @@ "regionTag": "chat_v1_generated_ChatService_SetUpSpace_sync", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -3840,18 +3840,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, 
"start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -4417,12 +4417,12 @@ "regionTag": "chat_v1_generated_ChatService_UpdateSpace_async", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4432,18 +4432,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -4501,12 +4501,12 @@ "regionTag": "chat_v1_generated_ChatService_UpdateSpace_sync", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4516,18 +4516,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py index 70b8272ff798..893992dd3ab5 100644 --- a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py +++ b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py @@ -5744,6 +5744,7 @@ def test_get_space(request_type, transport: str = "grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.get_space(request) @@ -6164,6 +6165,7 @@ def test_create_space(request_type, 
transport: str = "grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.create_space(request) @@ -6527,6 +6529,7 @@ def test_set_up_space(request_type, transport: str = "grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.set_up_space(request) @@ -6810,6 +6813,7 @@ def test_update_space(request_type, transport: str = "grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.update_space(request) @@ -7883,6 +7887,7 @@ def test_find_direct_message(request_type, transport: str = "grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.find_direct_message(request) @@ -13112,6 +13117,20 @@ def test_create_message_rest(request_type): }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", + "predefined_permission_settings": 1, + "permission_settings": { + "manage_members_and_groups": { + "managers_allowed": True, + "members_allowed": True, + }, + "modify_space_details": {}, + "toggle_history": {}, + "use_at_mention_all": {}, + "manage_apps": {}, + "manage_webhooks": {}, + "post_messages": {}, + "reply_messages": {}, + }, }, "fallback_text": "fallback_text_value", "action_response": { @@ -15259,6 +15278,20 @@ def test_update_message_rest(request_type): }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", + "predefined_permission_settings": 1, + "permission_settings": { + "manage_members_and_groups": { + "managers_allowed": True, 
+ "members_allowed": True, + }, + "modify_space_details": {}, + "toggle_history": {}, + "use_at_mention_all": {}, + "manage_apps": {}, + "manage_webhooks": {}, + "post_messages": {}, + "reply_messages": {}, + }, }, "fallback_text": "fallback_text_value", "action_response": { @@ -17113,6 +17146,7 @@ def test_get_space_rest(request_type): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -17441,6 +17475,20 @@ def test_create_space_rest(request_type): }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", + "predefined_permission_settings": 1, + "permission_settings": { + "manage_members_and_groups": { + "managers_allowed": True, + "members_allowed": True, + }, + "modify_space_details": {}, + "toggle_history": {}, + "use_at_mention_all": {}, + "manage_apps": {}, + "manage_webhooks": {}, + "post_messages": {}, + "reply_messages": {}, + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -17527,6 +17575,7 @@ def get_message_fields(field): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -17843,6 +17892,7 @@ def test_set_up_space_rest(request_type): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -18110,6 +18160,20 @@ def test_update_space_rest(request_type): }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", + "predefined_permission_settings": 1, + "permission_settings": { + "manage_members_and_groups": { + "managers_allowed": True, + "members_allowed": True, + }, + "modify_space_details": {}, + "toggle_history": {}, + "use_at_mention_all": {}, + "manage_apps": {}, + "manage_webhooks": {}, + "post_messages": {}, + "reply_messages": {}, + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -18196,6 +18260,7 @@ def get_message_fields(field): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -19059,6 +19124,7 @@ def test_find_direct_message_rest(request_type): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj From bbe5daf0c71a02ae780c7609d433787dec1bc168 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 13:46:55 -0400 Subject: [PATCH 49/59] feat: [google-cloud-deploy] added support for deploy policies (#13114) BEGIN_COMMIT_OVERRIDE feat: added support for deploy policies docs: Minor documentation updates END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
docs: Minor documentation updates PiperOrigin-RevId: 679600689 Source-Link: https://github.com/googleapis/googleapis/commit/bd4f3686c1831554b71129898e20f2cdb7aeac49 Source-Link: https://github.com/googleapis/googleapis-gen/commit/12a2d22bf1bda85ba9bed1fa60484306a2e55781 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRlcGxveS8uT3dsQm90LnlhbWwiLCJoIjoiMTJhMmQyMmJmMWJkYTg1YmE5YmVkMWZhNjA0ODQzMDZhMmU1NTc4MSJ9 --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr Co-authored-by: ohmayr --- .../google/cloud/deploy/__init__.py | 36 + .../google/cloud/deploy_v1/__init__.py | 34 + .../cloud/deploy_v1/gapic_metadata.json | 75 + .../services/cloud_deploy/async_client.py | 645 + .../deploy_v1/services/cloud_deploy/client.py | 650 + .../deploy_v1/services/cloud_deploy/pagers.py | 152 + .../services/cloud_deploy/transports/base.py | 91 + .../services/cloud_deploy/transports/grpc.py | 134 + .../cloud_deploy/transports/grpc_asyncio.py | 185 + .../services/cloud_deploy/transports/rest.py | 734 +- .../google/cloud/deploy_v1/types/__init__.py | 34 + .../cloud/deploy_v1/types/cloud_deploy.py | 1018 +- .../google/cloud/deploy_v1/types/log_enums.py | 3 + ...cloud_deploy_create_deploy_policy_async.py | 62 + ..._cloud_deploy_create_deploy_policy_sync.py | 62 + ...cloud_deploy_delete_deploy_policy_async.py | 56 + ..._cloud_deploy_delete_deploy_policy_sync.py | 56 + ...ed_cloud_deploy_get_deploy_policy_async.py | 52 + ...ted_cloud_deploy_get_deploy_policy_sync.py | 52 + ...cloud_deploy_list_deploy_policies_async.py | 53 + ..._cloud_deploy_list_deploy_policies_sync.py | 53 + ...cloud_deploy_update_deploy_policy_async.py | 60 + ..._cloud_deploy_update_deploy_policy_sync.py | 60 + ...ippet_metadata_google.cloud.deploy.v1.json | 1079 +- .../scripts/fixup_deploy_v1_keywords.py | 23 +- .../unit/gapic/deploy_v1/test_cloud_deploy.py | 14708 ++++++++++------ 26 files changed, 14650 insertions(+), 5517 deletions(-) create mode 100644 
packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py create mode 100644 packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py create mode 100644 packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py create mode 100644 packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py create mode 100644 packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py create mode 100644 packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py create mode 100644 packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py create mode 100644 packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py create mode 100644 packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_async.py create mode 100644 packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_sync.py diff --git a/packages/google-cloud-deploy/google/cloud/deploy/__init__.py b/packages/google-cloud-deploy/google/cloud/deploy/__init__.py index 6a2f11ab9889..4b051017ed31 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy/__init__.py +++ b/packages/google-cloud-deploy/google/cloud/deploy/__init__.py @@ -61,6 +61,7 @@ CreateChildRolloutJobRun, CreateCustomTargetTypeRequest, CreateDeliveryPipelineRequest, + CreateDeployPolicyRequest, CreateReleaseRequest, CreateRolloutRequest, CreateTargetRequest, @@ -74,20 +75,25 @@ DeleteAutomationRequest, DeleteCustomTargetTypeRequest, 
DeleteDeliveryPipelineRequest, + DeleteDeployPolicyRequest, DeleteTargetRequest, DeliveryPipeline, + DeliveryPipelineAttribute, DeployArtifact, DeployJob, DeployJobRun, DeployJobRunMetadata, DeploymentJobs, DeployParameters, + DeployPolicy, + DeployPolicyResourceSelector, ExecutionConfig, GetAutomationRequest, GetAutomationRunRequest, GetConfigRequest, GetCustomTargetTypeRequest, GetDeliveryPipelineRequest, + GetDeployPolicyRequest, GetJobRunRequest, GetReleaseRequest, GetRolloutRequest, @@ -106,6 +112,8 @@ ListCustomTargetTypesResponse, ListDeliveryPipelinesRequest, ListDeliveryPipelinesResponse, + ListDeployPoliciesRequest, + ListDeployPoliciesResponse, ListJobRunsRequest, ListJobRunsResponse, ListReleasesRequest, @@ -116,10 +124,14 @@ ListTargetsResponse, Metadata, MultiTarget, + OneTimeWindow, OperationMetadata, Phase, PipelineCondition, PipelineReadyCondition, + PolicyRule, + PolicyViolation, + PolicyViolationDetails, Postdeploy, PostdeployJob, PostdeployJobRun, @@ -144,6 +156,7 @@ RollbackTargetRequest, RollbackTargetResponse, Rollout, + RolloutRestriction, RuntimeConfig, SerialPipeline, SkaffoldModules, @@ -159,12 +172,15 @@ TargetsTypeCondition, TerminateJobRunRequest, TerminateJobRunResponse, + TimeWindows, UpdateAutomationRequest, UpdateCustomTargetTypeRequest, UpdateDeliveryPipelineRequest, + UpdateDeployPolicyRequest, UpdateTargetRequest, VerifyJob, VerifyJobRun, + WeeklyWindow, ) from google.cloud.deploy_v1.types.customtargettype_notification_payload import ( CustomTargetTypeNotificationEvent, @@ -172,6 +188,9 @@ from google.cloud.deploy_v1.types.deliverypipeline_notification_payload import ( DeliveryPipelineNotificationEvent, ) +from google.cloud.deploy_v1.types.deploypolicy_evaluation_payload import ( + DeployPolicyEvaluationEvent, +) from google.cloud.deploy_v1.types.deploypolicy_notification_payload import ( DeployPolicyNotificationEvent, ) @@ -231,6 +250,7 @@ "CreateChildRolloutJobRun", "CreateCustomTargetTypeRequest", 
"CreateDeliveryPipelineRequest", + "CreateDeployPolicyRequest", "CreateReleaseRequest", "CreateRolloutRequest", "CreateTargetRequest", @@ -244,20 +264,25 @@ "DeleteAutomationRequest", "DeleteCustomTargetTypeRequest", "DeleteDeliveryPipelineRequest", + "DeleteDeployPolicyRequest", "DeleteTargetRequest", "DeliveryPipeline", + "DeliveryPipelineAttribute", "DeployArtifact", "DeployJob", "DeployJobRun", "DeployJobRunMetadata", "DeploymentJobs", "DeployParameters", + "DeployPolicy", + "DeployPolicyResourceSelector", "ExecutionConfig", "GetAutomationRequest", "GetAutomationRunRequest", "GetConfigRequest", "GetCustomTargetTypeRequest", "GetDeliveryPipelineRequest", + "GetDeployPolicyRequest", "GetJobRunRequest", "GetReleaseRequest", "GetRolloutRequest", @@ -276,6 +301,8 @@ "ListCustomTargetTypesResponse", "ListDeliveryPipelinesRequest", "ListDeliveryPipelinesResponse", + "ListDeployPoliciesRequest", + "ListDeployPoliciesResponse", "ListJobRunsRequest", "ListJobRunsResponse", "ListReleasesRequest", @@ -286,10 +313,14 @@ "ListTargetsResponse", "Metadata", "MultiTarget", + "OneTimeWindow", "OperationMetadata", "Phase", "PipelineCondition", "PipelineReadyCondition", + "PolicyRule", + "PolicyViolation", + "PolicyViolationDetails", "Postdeploy", "PostdeployJob", "PostdeployJobRun", @@ -313,6 +344,7 @@ "RollbackTargetRequest", "RollbackTargetResponse", "Rollout", + "RolloutRestriction", "RuntimeConfig", "SerialPipeline", "SkaffoldModules", @@ -327,17 +359,21 @@ "TargetsTypeCondition", "TerminateJobRunRequest", "TerminateJobRunResponse", + "TimeWindows", "UpdateAutomationRequest", "UpdateCustomTargetTypeRequest", "UpdateDeliveryPipelineRequest", + "UpdateDeployPolicyRequest", "UpdateTargetRequest", "VerifyJob", "VerifyJobRun", + "WeeklyWindow", "BackoffMode", "RepairState", "SkaffoldSupportState", "CustomTargetTypeNotificationEvent", "DeliveryPipelineNotificationEvent", + "DeployPolicyEvaluationEvent", "DeployPolicyNotificationEvent", "JobRunNotificationEvent", "Type", diff --git 
a/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py index 80367e9c4284..c9c4ecd71f25 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py @@ -58,6 +58,7 @@ CreateChildRolloutJobRun, CreateCustomTargetTypeRequest, CreateDeliveryPipelineRequest, + CreateDeployPolicyRequest, CreateReleaseRequest, CreateRolloutRequest, CreateTargetRequest, @@ -71,20 +72,25 @@ DeleteAutomationRequest, DeleteCustomTargetTypeRequest, DeleteDeliveryPipelineRequest, + DeleteDeployPolicyRequest, DeleteTargetRequest, DeliveryPipeline, + DeliveryPipelineAttribute, DeployArtifact, DeployJob, DeployJobRun, DeployJobRunMetadata, DeploymentJobs, DeployParameters, + DeployPolicy, + DeployPolicyResourceSelector, ExecutionConfig, GetAutomationRequest, GetAutomationRunRequest, GetConfigRequest, GetCustomTargetTypeRequest, GetDeliveryPipelineRequest, + GetDeployPolicyRequest, GetJobRunRequest, GetReleaseRequest, GetRolloutRequest, @@ -103,6 +109,8 @@ ListCustomTargetTypesResponse, ListDeliveryPipelinesRequest, ListDeliveryPipelinesResponse, + ListDeployPoliciesRequest, + ListDeployPoliciesResponse, ListJobRunsRequest, ListJobRunsResponse, ListReleasesRequest, @@ -113,10 +121,14 @@ ListTargetsResponse, Metadata, MultiTarget, + OneTimeWindow, OperationMetadata, Phase, PipelineCondition, PipelineReadyCondition, + PolicyRule, + PolicyViolation, + PolicyViolationDetails, Postdeploy, PostdeployJob, PostdeployJobRun, @@ -141,6 +153,7 @@ RollbackTargetRequest, RollbackTargetResponse, Rollout, + RolloutRestriction, RuntimeConfig, SerialPipeline, SkaffoldModules, @@ -156,12 +169,15 @@ TargetsTypeCondition, TerminateJobRunRequest, TerminateJobRunResponse, + TimeWindows, UpdateAutomationRequest, UpdateCustomTargetTypeRequest, UpdateDeliveryPipelineRequest, + UpdateDeployPolicyRequest, UpdateTargetRequest, VerifyJob, VerifyJobRun, + 
WeeklyWindow, ) from .types.customtargettype_notification_payload import ( CustomTargetTypeNotificationEvent, @@ -169,6 +185,7 @@ from .types.deliverypipeline_notification_payload import ( DeliveryPipelineNotificationEvent, ) +from .types.deploypolicy_evaluation_payload import DeployPolicyEvaluationEvent from .types.deploypolicy_notification_payload import DeployPolicyNotificationEvent from .types.jobrun_notification_payload import JobRunNotificationEvent from .types.log_enums import Type @@ -219,6 +236,7 @@ "CreateChildRolloutJobRun", "CreateCustomTargetTypeRequest", "CreateDeliveryPipelineRequest", + "CreateDeployPolicyRequest", "CreateReleaseRequest", "CreateRolloutRequest", "CreateTargetRequest", @@ -233,15 +251,20 @@ "DeleteAutomationRequest", "DeleteCustomTargetTypeRequest", "DeleteDeliveryPipelineRequest", + "DeleteDeployPolicyRequest", "DeleteTargetRequest", "DeliveryPipeline", + "DeliveryPipelineAttribute", "DeliveryPipelineNotificationEvent", "DeployArtifact", "DeployJob", "DeployJobRun", "DeployJobRunMetadata", "DeployParameters", + "DeployPolicy", + "DeployPolicyEvaluationEvent", "DeployPolicyNotificationEvent", + "DeployPolicyResourceSelector", "DeploymentJobs", "ExecutionConfig", "GetAutomationRequest", @@ -249,6 +272,7 @@ "GetConfigRequest", "GetCustomTargetTypeRequest", "GetDeliveryPipelineRequest", + "GetDeployPolicyRequest", "GetJobRunRequest", "GetReleaseRequest", "GetRolloutRequest", @@ -268,6 +292,8 @@ "ListCustomTargetTypesResponse", "ListDeliveryPipelinesRequest", "ListDeliveryPipelinesResponse", + "ListDeployPoliciesRequest", + "ListDeployPoliciesResponse", "ListJobRunsRequest", "ListJobRunsResponse", "ListReleasesRequest", @@ -278,10 +304,14 @@ "ListTargetsResponse", "Metadata", "MultiTarget", + "OneTimeWindow", "OperationMetadata", "Phase", "PipelineCondition", "PipelineReadyCondition", + "PolicyRule", + "PolicyViolation", + "PolicyViolationDetails", "Postdeploy", "PostdeployJob", "PostdeployJobRun", @@ -309,6 +339,7 @@ 
"RollbackTargetResponse", "Rollout", "RolloutNotificationEvent", + "RolloutRestriction", "RolloutUpdateEvent", "RuntimeConfig", "SerialPipeline", @@ -326,11 +357,14 @@ "TargetsTypeCondition", "TerminateJobRunRequest", "TerminateJobRunResponse", + "TimeWindows", "Type", "UpdateAutomationRequest", "UpdateCustomTargetTypeRequest", "UpdateDeliveryPipelineRequest", + "UpdateDeployPolicyRequest", "UpdateTargetRequest", "VerifyJob", "VerifyJobRun", + "WeeklyWindow", ) diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_metadata.json b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_metadata.json index a8eee6244a5f..32200eac6f82 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_metadata.json +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_metadata.json @@ -50,6 +50,11 @@ "create_delivery_pipeline" ] }, + "CreateDeployPolicy": { + "methods": [ + "create_deploy_policy" + ] + }, "CreateRelease": { "methods": [ "create_release" @@ -80,6 +85,11 @@ "delete_delivery_pipeline" ] }, + "DeleteDeployPolicy": { + "methods": [ + "delete_deploy_policy" + ] + }, "DeleteTarget": { "methods": [ "delete_target" @@ -110,6 +120,11 @@ "get_delivery_pipeline" ] }, + "GetDeployPolicy": { + "methods": [ + "get_deploy_policy" + ] + }, "GetJobRun": { "methods": [ "get_job_run" @@ -155,6 +170,11 @@ "list_delivery_pipelines" ] }, + "ListDeployPolicies": { + "methods": [ + "list_deploy_policies" + ] + }, "ListJobRuns": { "methods": [ "list_job_runs" @@ -205,6 +225,11 @@ "update_delivery_pipeline" ] }, + "UpdateDeployPolicy": { + "methods": [ + "update_deploy_policy" + ] + }, "UpdateTarget": { "methods": [ "update_target" @@ -255,6 +280,11 @@ "create_delivery_pipeline" ] }, + "CreateDeployPolicy": { + "methods": [ + "create_deploy_policy" + ] + }, "CreateRelease": { "methods": [ "create_release" @@ -285,6 +315,11 @@ "delete_delivery_pipeline" ] }, + "DeleteDeployPolicy": { + "methods": [ + "delete_deploy_policy" + ] + }, "DeleteTarget": 
{ "methods": [ "delete_target" @@ -315,6 +350,11 @@ "get_delivery_pipeline" ] }, + "GetDeployPolicy": { + "methods": [ + "get_deploy_policy" + ] + }, "GetJobRun": { "methods": [ "get_job_run" @@ -360,6 +400,11 @@ "list_delivery_pipelines" ] }, + "ListDeployPolicies": { + "methods": [ + "list_deploy_policies" + ] + }, "ListJobRuns": { "methods": [ "list_job_runs" @@ -410,6 +455,11 @@ "update_delivery_pipeline" ] }, + "UpdateDeployPolicy": { + "methods": [ + "update_deploy_policy" + ] + }, "UpdateTarget": { "methods": [ "update_target" @@ -460,6 +510,11 @@ "create_delivery_pipeline" ] }, + "CreateDeployPolicy": { + "methods": [ + "create_deploy_policy" + ] + }, "CreateRelease": { "methods": [ "create_release" @@ -490,6 +545,11 @@ "delete_delivery_pipeline" ] }, + "DeleteDeployPolicy": { + "methods": [ + "delete_deploy_policy" + ] + }, "DeleteTarget": { "methods": [ "delete_target" @@ -520,6 +580,11 @@ "get_delivery_pipeline" ] }, + "GetDeployPolicy": { + "methods": [ + "get_deploy_policy" + ] + }, "GetJobRun": { "methods": [ "get_job_run" @@ -565,6 +630,11 @@ "list_delivery_pipelines" ] }, + "ListDeployPolicies": { + "methods": [ + "list_deploy_policies" + ] + }, "ListJobRuns": { "methods": [ "list_job_runs" @@ -615,6 +685,11 @@ "update_delivery_pipeline" ] }, + "UpdateDeployPolicy": { + "methods": [ + "update_deploy_policy" + ] + }, "UpdateTarget": { "methods": [ "update_target" diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py index 72c3f7a4098b..d4fb28cb770e 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py @@ -95,6 +95,8 @@ class CloudDeployAsyncClient: parse_delivery_pipeline_path = staticmethod( CloudDeployClient.parse_delivery_pipeline_path ) + deploy_policy_path = 
staticmethod(CloudDeployClient.deploy_policy_path) + parse_deploy_policy_path = staticmethod(CloudDeployClient.parse_deploy_policy_path) job_path = staticmethod(CloudDeployClient.job_path) parse_job_path = staticmethod(CloudDeployClient.parse_job_path) job_run_path = staticmethod(CloudDeployClient.job_run_path) @@ -2826,6 +2828,649 @@ async def sample_abandon_release(): # Done; return the response. return response + async def create_deploy_policy( + self, + request: Optional[Union[cloud_deploy.CreateDeployPolicyRequest, dict]] = None, + *, + parent: Optional[str] = None, + deploy_policy: Optional[cloud_deploy.DeployPolicy] = None, + deploy_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new DeployPolicy in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_create_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.create_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.CreateDeployPolicyRequest, dict]]): + The request object. The request object for ``CreateDeployPolicy``. + parent (:class:`str`): + Required. The parent collection in which the + ``DeployPolicy`` must be created. The format is + ``projects/{project_id}/locations/{location_name}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deploy_policy (:class:`google.cloud.deploy_v1.types.DeployPolicy`): + Required. The ``DeployPolicy`` to create. + This corresponds to the ``deploy_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deploy_policy_id (:class:`str`): + Required. ID of the ``DeployPolicy``. + This corresponds to the ``deploy_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.deploy_v1.types.DeployPolicy` A + DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deploy_policy, deploy_policy_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.CreateDeployPolicyRequest): + request = cloud_deploy.CreateDeployPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deploy_policy is not None: + request.deploy_policy = deploy_policy + if deploy_policy_id is not None: + request.deploy_policy_id = deploy_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_deploy_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_deploy.DeployPolicy, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_deploy_policy( + self, + request: Optional[Union[cloud_deploy.UpdateDeployPolicyRequest, dict]] = None, + *, + deploy_policy: Optional[cloud_deploy.DeployPolicy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_update_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.UpdateDeployPolicyRequest( + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.update_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.UpdateDeployPolicyRequest, dict]]): + The request object. The request object for ``UpdateDeployPolicy``. + deploy_policy (:class:`google.cloud.deploy_v1.types.DeployPolicy`): + Required. The ``DeployPolicy`` to update. + This corresponds to the ``deploy_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten by the update in the ``DeployPolicy`` + resource. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it's in the mask. If the user + doesn't provide a mask then all fields are overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.deploy_v1.types.DeployPolicy` A + DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([deploy_policy, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.UpdateDeployPolicyRequest): + request = cloud_deploy.UpdateDeployPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if deploy_policy is not None: + request.deploy_policy = deploy_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_deploy_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("deploy_policy.name", request.deploy_policy.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_deploy.DeployPolicy, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_deploy_policy( + self, + request: Optional[Union[cloud_deploy.DeleteDeployPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_delete_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.DeleteDeployPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.DeleteDeployPolicyRequest, dict]]): + The request object. The request object for ``DeleteDeployPolicy``. + name (:class:`str`): + Required. The name of the ``DeployPolicy`` to delete. 
+ The format is + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.DeleteDeployPolicyRequest): + request = cloud_deploy.DeleteDeployPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_deploy_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_deploy_policies( + self, + request: Optional[Union[cloud_deploy.ListDeployPoliciesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeployPoliciesAsyncPager: + r"""Lists DeployPolicies in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_list_deploy_policies(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.ListDeployPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deploy_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.ListDeployPoliciesRequest, dict]]): + The request object. The request object for ``ListDeployPolicies``. + parent (:class:`str`): + Required. The parent, which owns this collection of + deploy policies. Format must be + ``projects/{project_id}/locations/{location_name}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeployPoliciesAsyncPager: + The response object from ListDeployPolicies. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.ListDeployPoliciesRequest): + request = cloud_deploy.ListDeployPoliciesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_deploy_policies + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDeployPoliciesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_deploy_policy( + self, + request: Optional[Union[cloud_deploy.GetDeployPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.DeployPolicy: + r"""Gets details of a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_get_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.GetDeployPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deploy_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.GetDeployPolicyRequest, dict]]): + The request object. The request object for ``GetDeployPolicy`` + name (:class:`str`): + Required. Name of the ``DeployPolicy``. Format must be + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.deploy_v1.types.DeployPolicy: + A DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.GetDeployPolicyRequest): + request = cloud_deploy.GetDeployPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_deploy_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+        return response
+
     async def approve_rollout(
         self,
         request: Optional[Union[cloud_deploy.ApproveRolloutRequest, dict]] = None,
diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py
index af6eb35e86ed..8bb6a9e6f9b9 100644
--- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py
+++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py
@@ -349,6 +349,28 @@ def parse_delivery_pipeline_path(path: str) -> Dict[str, str]:
         )
         return m.groupdict() if m else {}
 
+    @staticmethod
+    def deploy_policy_path(
+        project: str,
+        location: str,
+        deploy_policy: str,
+    ) -> str:
+        """Returns a fully-qualified deploy_policy string."""
+        return "projects/{project}/locations/{location}/deployPolicies/{deploy_policy}".format(
+            project=project,
+            location=location,
+            deploy_policy=deploy_policy,
+        )
+
+    @staticmethod
+    def parse_deploy_policy_path(path: str) -> Dict[str, str]:
+        """Parses a deploy_policy path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/deployPolicies/(?P<deploy_policy>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
     @staticmethod
     def job_path(
         project: str,
@@ -3507,6 +3529,634 @@ def sample_abandon_release():
         # Done; return the response.
         return response
 
+    def create_deploy_policy(
+        self,
+        request: Optional[Union[cloud_deploy.CreateDeployPolicyRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        deploy_policy: Optional[cloud_deploy.DeployPolicy] = None,
+        deploy_policy_id: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Creates a new DeployPolicy in a given project and
+        location.
+
+        ..
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_create_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.create_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.CreateDeployPolicyRequest, dict]): + The request object. The request object for ``CreateDeployPolicy``. + parent (str): + Required. The parent collection in which the + ``DeployPolicy`` must be created. The format is + ``projects/{project_id}/locations/{location_name}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deploy_policy (google.cloud.deploy_v1.types.DeployPolicy): + Required. The ``DeployPolicy`` to create. + This corresponds to the ``deploy_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deploy_policy_id (str): + Required. ID of the ``DeployPolicy``. 
+ This corresponds to the ``deploy_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.deploy_v1.types.DeployPolicy` A + DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deploy_policy, deploy_policy_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.CreateDeployPolicyRequest): + request = cloud_deploy.CreateDeployPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deploy_policy is not None: + request.deploy_policy = deploy_policy + if deploy_policy_id is not None: + request.deploy_policy_id = deploy_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_deploy_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_deploy.DeployPolicy, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_deploy_policy( + self, + request: Optional[Union[cloud_deploy.UpdateDeployPolicyRequest, dict]] = None, + *, + deploy_policy: Optional[cloud_deploy.DeployPolicy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_update_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.UpdateDeployPolicyRequest( + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.update_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.UpdateDeployPolicyRequest, dict]): + The request object. The request object for ``UpdateDeployPolicy``. + deploy_policy (google.cloud.deploy_v1.types.DeployPolicy): + Required. The ``DeployPolicy`` to update. + This corresponds to the ``deploy_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten by the update in the ``DeployPolicy`` + resource. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it's in the mask. If the user + doesn't provide a mask then all fields are overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.deploy_v1.types.DeployPolicy` A + DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([deploy_policy, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.UpdateDeployPolicyRequest): + request = cloud_deploy.UpdateDeployPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if deploy_policy is not None: + request.deploy_policy = deploy_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_deploy_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("deploy_policy.name", request.deploy_policy.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_deploy.DeployPolicy, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_deploy_policy( + self, + request: Optional[Union[cloud_deploy.DeleteDeployPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_delete_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.DeleteDeployPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.DeleteDeployPolicyRequest, dict]): + The request object. The request object for ``DeleteDeployPolicy``. + name (str): + Required. The name of the ``DeployPolicy`` to delete. + The format is + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.DeleteDeployPolicyRequest): + request = cloud_deploy.DeleteDeployPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_deploy_policy] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_deploy_policies( + self, + request: Optional[Union[cloud_deploy.ListDeployPoliciesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeployPoliciesPager: + r"""Lists DeployPolicies in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_list_deploy_policies(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.ListDeployPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deploy_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.ListDeployPoliciesRequest, dict]): + The request object. The request object for ``ListDeployPolicies``. + parent (str): + Required. 
The parent, which owns this collection of + deploy policies. Format must be + ``projects/{project_id}/locations/{location_name}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeployPoliciesPager: + The response object from ListDeployPolicies. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.ListDeployPoliciesRequest): + request = cloud_deploy.ListDeployPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_deploy_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDeployPoliciesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_deploy_policy( + self, + request: Optional[Union[cloud_deploy.GetDeployPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.DeployPolicy: + r"""Gets details of a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_get_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.GetDeployPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_deploy_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.GetDeployPolicyRequest, dict]): + The request object. The request object for ``GetDeployPolicy`` + name (str): + Required. Name of the ``DeployPolicy``. Format must be + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.deploy_v1.types.DeployPolicy: + A DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.GetDeployPolicyRequest): + request = cloud_deploy.GetDeployPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_deploy_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def approve_rollout( self, request: Optional[Union[cloud_deploy.ApproveRolloutRequest, dict]] = None, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/pagers.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/pagers.py index 508ba8ca2d07..29ee6b427605 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/pagers.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/pagers.py @@ -649,6 +649,158 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class ListDeployPoliciesPager: + """A pager for iterating through ``list_deploy_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.deploy_v1.types.ListDeployPoliciesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``deploy_policies`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDeployPolicies`` requests and continue to iterate + through the ``deploy_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.deploy_v1.types.ListDeployPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloud_deploy.ListDeployPoliciesResponse], + request: cloud_deploy.ListDeployPoliciesRequest, + response: cloud_deploy.ListDeployPoliciesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.deploy_v1.types.ListDeployPoliciesRequest): + The initial request object. 
+ response (google.cloud.deploy_v1.types.ListDeployPoliciesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloud_deploy.ListDeployPoliciesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloud_deploy.ListDeployPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[cloud_deploy.DeployPolicy]: + for page in self.pages: + yield from page.deploy_policies + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDeployPoliciesAsyncPager: + """A pager for iterating through ``list_deploy_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.deploy_v1.types.ListDeployPoliciesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``deploy_policies`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDeployPolicies`` requests and continue to iterate + through the ``deploy_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.deploy_v1.types.ListDeployPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[cloud_deploy.ListDeployPoliciesResponse]], + request: cloud_deploy.ListDeployPoliciesRequest, + response: cloud_deploy.ListDeployPoliciesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.deploy_v1.types.ListDeployPoliciesRequest): + The initial request object. + response (google.cloud.deploy_v1.types.ListDeployPoliciesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloud_deploy.ListDeployPoliciesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloud_deploy.ListDeployPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[cloud_deploy.DeployPolicy]: + async def async_generator(): + async for page in self.pages: + for response in page.deploy_policies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListRolloutsPager: """A pager for 
iterating through ``list_rollouts`` requests. diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/base.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/base.py index d3042176dd81..d52eff30b350 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/base.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/base.py @@ -304,6 +304,49 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_deploy_policy: gapic_v1.method.wrap_method( + self.create_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.update_deploy_policy: gapic_v1.method.wrap_method( + self.update_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_deploy_policy: gapic_v1.method.wrap_method( + self.delete_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.list_deploy_policies: gapic_v1.method.wrap_method( + self.list_deploy_policies, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_deploy_policy: gapic_v1.method.wrap_method( + self.get_deploy_policy, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.approve_rollout: gapic_v1.method.wrap_method( self.approve_rollout, default_timeout=60.0, @@ -699,6 +742,54 @@ def abandon_release( ]: raise NotImplementedError() + @property + def create_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.CreateDeployPolicyRequest], + Union[operations_pb2.Operation, 
Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.UpdateDeployPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.DeleteDeployPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_deploy_policies( + self, + ) -> Callable[ + [cloud_deploy.ListDeployPoliciesRequest], + Union[ + cloud_deploy.ListDeployPoliciesResponse, + Awaitable[cloud_deploy.ListDeployPoliciesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.GetDeployPolicyRequest], + Union[cloud_deploy.DeployPolicy, Awaitable[cloud_deploy.DeployPolicy]], + ]: + raise NotImplementedError() + @property def approve_rollout( self, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc.py index 4ce9240634f7..4590ce080f41 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc.py @@ -808,6 +808,140 @@ def abandon_release( ) return self._stubs["abandon_release"] + @property + def create_deploy_policy( + self, + ) -> Callable[[cloud_deploy.CreateDeployPolicyRequest], operations_pb2.Operation]: + r"""Return a callable for the create deploy policy method over gRPC. + + Creates a new DeployPolicy in a given project and + location. + + Returns: + Callable[[~.CreateDeployPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_deploy_policy" not in self._stubs: + self._stubs["create_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/CreateDeployPolicy", + request_serializer=cloud_deploy.CreateDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_deploy_policy"] + + @property + def update_deploy_policy( + self, + ) -> Callable[[cloud_deploy.UpdateDeployPolicyRequest], operations_pb2.Operation]: + r"""Return a callable for the update deploy policy method over gRPC. + + Updates the parameters of a single DeployPolicy. + + Returns: + Callable[[~.UpdateDeployPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_deploy_policy" not in self._stubs: + self._stubs["update_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/UpdateDeployPolicy", + request_serializer=cloud_deploy.UpdateDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_deploy_policy"] + + @property + def delete_deploy_policy( + self, + ) -> Callable[[cloud_deploy.DeleteDeployPolicyRequest], operations_pb2.Operation]: + r"""Return a callable for the delete deploy policy method over gRPC. + + Deletes a single DeployPolicy. + + Returns: + Callable[[~.DeleteDeployPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_deploy_policy" not in self._stubs: + self._stubs["delete_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/DeleteDeployPolicy", + request_serializer=cloud_deploy.DeleteDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_deploy_policy"] + + @property + def list_deploy_policies( + self, + ) -> Callable[ + [cloud_deploy.ListDeployPoliciesRequest], + cloud_deploy.ListDeployPoliciesResponse, + ]: + r"""Return a callable for the list deploy policies method over gRPC. + + Lists DeployPolicies in a given project and location. + + Returns: + Callable[[~.ListDeployPoliciesRequest], + ~.ListDeployPoliciesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deploy_policies" not in self._stubs: + self._stubs["list_deploy_policies"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/ListDeployPolicies", + request_serializer=cloud_deploy.ListDeployPoliciesRequest.serialize, + response_deserializer=cloud_deploy.ListDeployPoliciesResponse.deserialize, + ) + return self._stubs["list_deploy_policies"] + + @property + def get_deploy_policy( + self, + ) -> Callable[[cloud_deploy.GetDeployPolicyRequest], cloud_deploy.DeployPolicy]: + r"""Return a callable for the get deploy policy method over gRPC. + + Gets details of a single DeployPolicy. + + Returns: + Callable[[~.GetDeployPolicyRequest], + ~.DeployPolicy]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_deploy_policy" not in self._stubs: + self._stubs["get_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/GetDeployPolicy", + request_serializer=cloud_deploy.GetDeployPolicyRequest.serialize, + response_deserializer=cloud_deploy.DeployPolicy.deserialize, + ) + return self._stubs["get_deploy_policy"] + @property def approve_rollout( self, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc_asyncio.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc_asyncio.py index 62883f74e557..db641300032a 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc_asyncio.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc_asyncio.py @@ -834,6 +834,148 @@ def abandon_release( ) return self._stubs["abandon_release"] + @property + def create_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.CreateDeployPolicyRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create deploy policy method over gRPC. + + Creates a new DeployPolicy in a given project and + location. + + Returns: + Callable[[~.CreateDeployPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_deploy_policy" not in self._stubs: + self._stubs["create_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/CreateDeployPolicy", + request_serializer=cloud_deploy.CreateDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_deploy_policy"] + + @property + def update_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.UpdateDeployPolicyRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update deploy policy method over gRPC. + + Updates the parameters of a single DeployPolicy. + + Returns: + Callable[[~.UpdateDeployPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_deploy_policy" not in self._stubs: + self._stubs["update_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/UpdateDeployPolicy", + request_serializer=cloud_deploy.UpdateDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_deploy_policy"] + + @property + def delete_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.DeleteDeployPolicyRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete deploy policy method over gRPC. + + Deletes a single DeployPolicy. + + Returns: + Callable[[~.DeleteDeployPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_deploy_policy" not in self._stubs: + self._stubs["delete_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/DeleteDeployPolicy", + request_serializer=cloud_deploy.DeleteDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_deploy_policy"] + + @property + def list_deploy_policies( + self, + ) -> Callable[ + [cloud_deploy.ListDeployPoliciesRequest], + Awaitable[cloud_deploy.ListDeployPoliciesResponse], + ]: + r"""Return a callable for the list deploy policies method over gRPC. + + Lists DeployPolicies in a given project and location. + + Returns: + Callable[[~.ListDeployPoliciesRequest], + Awaitable[~.ListDeployPoliciesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deploy_policies" not in self._stubs: + self._stubs["list_deploy_policies"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/ListDeployPolicies", + request_serializer=cloud_deploy.ListDeployPoliciesRequest.serialize, + response_deserializer=cloud_deploy.ListDeployPoliciesResponse.deserialize, + ) + return self._stubs["list_deploy_policies"] + + @property + def get_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.GetDeployPolicyRequest], Awaitable[cloud_deploy.DeployPolicy] + ]: + r"""Return a callable for the get deploy policy method over gRPC. + + Gets details of a single DeployPolicy. + + Returns: + Callable[[~.GetDeployPolicyRequest], + Awaitable[~.DeployPolicy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_deploy_policy" not in self._stubs: + self._stubs["get_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/GetDeployPolicy", + request_serializer=cloud_deploy.GetDeployPolicyRequest.serialize, + response_deserializer=cloud_deploy.DeployPolicy.deserialize, + ) + return self._stubs["get_deploy_policy"] + @property def approve_rollout( self, @@ -1577,6 +1719,49 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_deploy_policy: gapic_v1.method_async.wrap_method( + self.create_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.update_deploy_policy: gapic_v1.method_async.wrap_method( + self.update_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_deploy_policy: gapic_v1.method_async.wrap_method( + self.delete_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.list_deploy_policies: gapic_v1.method_async.wrap_method( + self.list_deploy_policies, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_deploy_policy: gapic_v1.method_async.wrap_method( + self.get_deploy_policy, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.approve_rollout: gapic_v1.method_async.wrap_method( self.approve_rollout, default_timeout=60.0, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py 
b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py index a96dad3bb982..bd704448565e 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py @@ -138,6 +138,14 @@ def post_create_delivery_pipeline(self, response): logging.log(f"Received response: {response}") return response + def pre_create_deploy_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_deploy_policy(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_release(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -186,6 +194,14 @@ def post_delete_delivery_pipeline(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_deploy_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_deploy_policy(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_target(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -234,6 +250,14 @@ def post_get_delivery_pipeline(self, response): logging.log(f"Received response: {response}") return response + def pre_get_deploy_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_deploy_policy(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_job_run(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -306,6 +330,14 @@ def post_list_delivery_pipelines(self, response): logging.log(f"Received response: {response}") return response + def pre_list_deploy_policies(self, 
request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_deploy_policies(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_job_runs(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -386,6 +418,14 @@ def post_update_delivery_pipeline(self, response): logging.log(f"Received response: {response}") return response + def pre_update_deploy_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_deploy_policy(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_target(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -584,6 +624,29 @@ def post_create_delivery_pipeline( """ return response + def pre_create_deploy_policy( + self, + request: cloud_deploy.CreateDeployPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.CreateDeployPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_deploy_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_create_deploy_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_deploy_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. 
+ """ + return response + def pre_create_release( self, request: cloud_deploy.CreateReleaseRequest, @@ -722,6 +785,29 @@ def post_delete_delivery_pipeline( """ return response + def pre_delete_deploy_policy( + self, + request: cloud_deploy.DeleteDeployPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.DeleteDeployPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_deploy_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_delete_deploy_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_deploy_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + def pre_delete_target( self, request: cloud_deploy.DeleteTargetRequest, @@ -858,6 +944,29 @@ def post_get_delivery_pipeline( """ return response + def pre_get_deploy_policy( + self, + request: cloud_deploy.GetDeployPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.GetDeployPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_deploy_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_get_deploy_policy( + self, response: cloud_deploy.DeployPolicy + ) -> cloud_deploy.DeployPolicy: + """Post-rpc interceptor for get_deploy_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. 
+ """ + return response + def pre_get_job_run( self, request: cloud_deploy.GetJobRunRequest, @@ -1057,6 +1166,29 @@ def post_list_delivery_pipelines( """ return response + def pre_list_deploy_policies( + self, + request: cloud_deploy.ListDeployPoliciesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.ListDeployPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_deploy_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_list_deploy_policies( + self, response: cloud_deploy.ListDeployPoliciesResponse + ) -> cloud_deploy.ListDeployPoliciesResponse: + """Post-rpc interceptor for list_deploy_policies + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + def pre_list_job_runs( self, request: cloud_deploy.ListJobRunsRequest, @@ -1285,6 +1417,29 @@ def post_update_delivery_pipeline( """ return response + def pre_update_deploy_policy( + self, + request: cloud_deploy.UpdateDeployPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.UpdateDeployPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_deploy_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_update_deploy_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_deploy_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. 
+ """ + return response + def pre_update_target( self, request: cloud_deploy.UpdateTargetRequest, @@ -2413,6 +2568,103 @@ def __call__( resp = self._interceptor.post_create_delivery_pipeline(resp) return resp + class _CreateDeployPolicy(CloudDeployRestStub): + def __hash__(self): + return hash("CreateDeployPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "deployPolicyId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.CreateDeployPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create deploy policy method over HTTP. + + Args: + request (~.cloud_deploy.CreateDeployPolicyRequest): + The request object. The request object for ``CreateDeployPolicy``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/deployPolicies", + "body": "deploy_policy", + }, + ] + request, metadata = self._interceptor.pre_create_deploy_policy( + request, metadata + ) + pb_request = cloud_deploy.CreateDeployPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_deploy_policy(resp) + return resp + class _CreateRelease(CloudDeployRestStub): def __hash__(self): return hash("CreateRelease") @@ -2963,9 +3215,9 @@ def __call__( resp = self._interceptor.post_delete_delivery_pipeline(resp) return resp - class _DeleteTarget(CloudDeployRestStub): + class _DeleteDeployPolicy(CloudDeployRestStub): def __hash__(self): - return hash("DeleteTarget") + return hash("DeleteDeployPolicy") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2979,17 +3231,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: cloud_deploy.DeleteTargetRequest, + request: cloud_deploy.DeleteDeployPolicyRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the delete target method over HTTP. + r"""Call the delete deploy policy method over HTTP. Args: - request (~.cloud_deploy.DeleteTargetRequest): - The request object. The request object for ``DeleteTarget``. + request (~.cloud_deploy.DeleteDeployPolicyRequest): + The request object. The request object for ``DeleteDeployPolicy``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -3007,11 +3259,13 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/targets/*}", + "uri": "/v1/{name=projects/*/locations/*/deployPolicies/*}", }, ] - request, metadata = self._interceptor.pre_delete_target(request, metadata) - pb_request = cloud_deploy.DeleteTargetRequest.pb(request) + request, metadata = self._interceptor.pre_delete_deploy_policy( + request, metadata + ) + pb_request = cloud_deploy.DeleteDeployPolicyRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3046,12 +3300,12 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_target(resp) + resp = self._interceptor.post_delete_deploy_policy(resp) return resp - class _GetAutomation(CloudDeployRestStub): + class _DeleteTarget(CloudDeployRestStub): def __hash__(self): - return hash("GetAutomation") + return hash("DeleteTarget") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3065,17 +3319,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: cloud_deploy.GetAutomationRequest, + request: cloud_deploy.DeleteTargetRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> cloud_deploy.Automation: - r"""Call the get automation method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the delete target method over HTTP. Args: - request (~.cloud_deploy.GetAutomationRequest): - The request object. The request object for ``GetAutomation`` + request (~.cloud_deploy.DeleteTargetRequest): + The request object. The request object for ``DeleteTarget``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -3083,26 +3337,21 @@ def __call__( sent along with the request as metadata. Returns: - ~.cloud_deploy.Automation: - An ``Automation`` resource in the Cloud Deploy API. - - An ``Automation`` enables the automation of manually - driven actions for a Delivery Pipeline, which includes - Release promotion among Targets, Rollout repair and - Rollout deployment strategy advancement. The intention - of Automation is to reduce manual intervention in the - continuous delivery process. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/deliveryPipelines/*/automations/*}", + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/targets/*}", }, ] - request, metadata = self._interceptor.pre_get_automation(request, metadata) - pb_request = cloud_deploy.GetAutomationRequest.pb(request) + request, metadata = self._interceptor.pre_delete_target(request, metadata) + pb_request = cloud_deploy.DeleteTargetRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3135,16 +3384,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = cloud_deploy.Automation() - pb_resp = cloud_deploy.Automation.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_automation(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_target(resp) return resp - class _GetAutomationRun(CloudDeployRestStub): + class _GetAutomation(CloudDeployRestStub): def __hash__(self): - return hash("GetAutomationRun") + return hash("GetAutomation") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3158,7 +3405,100 @@ def 
_get_unset_required_fields(cls, message_dict): def __call__( self, - request: cloud_deploy.GetAutomationRunRequest, + request: cloud_deploy.GetAutomationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.Automation: + r"""Call the get automation method over HTTP. + + Args: + request (~.cloud_deploy.GetAutomationRequest): + The request object. The request object for ``GetAutomation`` + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.Automation: + An ``Automation`` resource in the Cloud Deploy API. + + An ``Automation`` enables the automation of manually + driven actions for a Delivery Pipeline, which includes + Release promotion among Targets, Rollout repair and + Rollout deployment strategy advancement. The intention + of Automation is to reduce manual intervention in the + continuous delivery process. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/deliveryPipelines/*/automations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_automation(request, metadata) + pb_request = cloud_deploy.GetAutomationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.Automation() + pb_resp = cloud_deploy.Automation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_automation(resp) + return resp + + class _GetAutomationRun(CloudDeployRestStub): + def __hash__(self): + return hash("GetAutomationRun") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.GetAutomationRunRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, @@ -3502,6 +3842,97 @@ def __call__( resp = self._interceptor.post_get_delivery_pipeline(resp) return resp + class _GetDeployPolicy(CloudDeployRestStub): + def __hash__(self): + return hash("GetDeployPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.GetDeployPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.DeployPolicy: + r"""Call the get deploy policy method over HTTP. + + Args: + request (~.cloud_deploy.GetDeployPolicyRequest): + The request object. The request object for ``GetDeployPolicy`` + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.cloud_deploy.DeployPolicy: + A ``DeployPolicy`` resource in the Cloud Deploy API. + + A ``DeployPolicy`` inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/deployPolicies/*}", + }, + ] + request, metadata = self._interceptor.pre_get_deploy_policy( + request, metadata + ) + pb_request = cloud_deploy.GetDeployPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.DeployPolicy() + pb_resp = cloud_deploy.DeployPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_deploy_policy(resp) + return resp + class _GetJobRun(CloudDeployRestStub): def __hash__(self): return hash("GetJobRun") @@ -4300,6 +4731,93 @@ def __call__( resp = self._interceptor.post_list_delivery_pipelines(resp) return resp + class _ListDeployPolicies(CloudDeployRestStub): + def __hash__(self): + return hash("ListDeployPolicies") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.ListDeployPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.ListDeployPoliciesResponse: + r"""Call the list deploy policies method over HTTP. + + Args: + request (~.cloud_deploy.ListDeployPoliciesRequest): + The request object. The request object for ``ListDeployPolicies``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.ListDeployPoliciesResponse: + The response object from ``ListDeployPolicies``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/deployPolicies", + }, + ] + request, metadata = self._interceptor.pre_list_deploy_policies( + request, metadata + ) + pb_request = cloud_deploy.ListDeployPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.ListDeployPoliciesResponse() + pb_resp = cloud_deploy.ListDeployPoliciesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_deploy_policies(resp) + return resp + class _ListJobRuns(CloudDeployRestStub): def __hash__(self): return hash("ListJobRuns") @@ -5216,6 +5734,103 @@ def __call__( resp = self._interceptor.post_update_delivery_pipeline(resp) return resp + class _UpdateDeployPolicy(CloudDeployRestStub): + def __hash__(self): + return hash("UpdateDeployPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.UpdateDeployPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update deploy policy method over HTTP. + + Args: + request (~.cloud_deploy.UpdateDeployPolicyRequest): + The request object. The request object for ``UpdateDeployPolicy``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{deploy_policy.name=projects/*/locations/*/deployPolicies/*}", + "body": "deploy_policy", + }, + ] + request, metadata = self._interceptor.pre_update_deploy_policy( + request, metadata + ) + pb_request = cloud_deploy.UpdateDeployPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_deploy_policy(resp) + return resp + class _UpdateTarget(CloudDeployRestStub): def __hash__(self): return hash("UpdateTarget") @@ -5390,6 +6005,14 @@ def create_delivery_pipeline( # In C++ this would require a dynamic_cast return self._CreateDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + @property + def create_deploy_policy( + self, + ) -> Callable[[cloud_deploy.CreateDeployPolicyRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDeployPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def create_release( self, @@ -5442,6 +6065,14 @@ def delete_delivery_pipeline( # In C++ this would require a dynamic_cast return self._DeleteDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_deploy_policy( + self, + ) -> Callable[[cloud_deploy.DeleteDeployPolicyRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteDeployPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def delete_target( self, @@ -5494,6 +6125,14 @@ def get_delivery_pipeline( # In C++ this would require a dynamic_cast return self._GetDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + @property + def get_deploy_policy( + self, + ) -> Callable[[cloud_deploy.GetDeployPolicyRequest], cloud_deploy.DeployPolicy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDeployPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def get_job_run( self, @@ -5577,6 +6216,17 @@ def list_delivery_pipelines( # In C++ this would require a dynamic_cast return self._ListDeliveryPipelines(self._session, self._host, self._interceptor) # type: ignore + @property + def list_deploy_policies( + self, + ) -> Callable[ + [cloud_deploy.ListDeployPoliciesRequest], + cloud_deploy.ListDeployPoliciesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDeployPolicies(self._session, self._host, self._interceptor) # type: ignore + @property def list_job_runs( self, @@ -5669,6 +6319,14 @@ def update_delivery_pipeline( # In C++ this would require a dynamic_cast return self._UpdateDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + @property + def update_deploy_policy( + self, + ) -> Callable[[cloud_deploy.UpdateDeployPolicyRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateDeployPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def update_target( self, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py index 215038669273..7017500e27c7 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py @@ -52,6 +52,7 @@ CreateChildRolloutJobRun, CreateCustomTargetTypeRequest, CreateDeliveryPipelineRequest, + CreateDeployPolicyRequest, CreateReleaseRequest, CreateRolloutRequest, CreateTargetRequest, @@ -65,20 +66,25 @@ DeleteAutomationRequest, DeleteCustomTargetTypeRequest, DeleteDeliveryPipelineRequest, + DeleteDeployPolicyRequest, DeleteTargetRequest, DeliveryPipeline, + DeliveryPipelineAttribute, DeployArtifact, DeployJob, DeployJobRun, DeployJobRunMetadata, DeploymentJobs, DeployParameters, + DeployPolicy, + DeployPolicyResourceSelector, ExecutionConfig, GetAutomationRequest, GetAutomationRunRequest, GetConfigRequest, GetCustomTargetTypeRequest, GetDeliveryPipelineRequest, + GetDeployPolicyRequest, GetJobRunRequest, GetReleaseRequest, GetRolloutRequest, @@ -97,6 +103,8 @@ ListCustomTargetTypesResponse, ListDeliveryPipelinesRequest, ListDeliveryPipelinesResponse, + ListDeployPoliciesRequest, + ListDeployPoliciesResponse, ListJobRunsRequest, ListJobRunsResponse, ListReleasesRequest, @@ -107,10 +115,14 @@ ListTargetsResponse, Metadata, MultiTarget, + OneTimeWindow, OperationMetadata, Phase, PipelineCondition, PipelineReadyCondition, + PolicyRule, + PolicyViolation, + PolicyViolationDetails, Postdeploy, PostdeployJob, PostdeployJobRun, @@ -135,6 +147,7 @@ RollbackTargetRequest, RollbackTargetResponse, Rollout, + RolloutRestriction, RuntimeConfig, SerialPipeline, SkaffoldModules, @@ -150,15 +163,19 @@ TargetsTypeCondition, TerminateJobRunRequest, 
TerminateJobRunResponse, + TimeWindows, UpdateAutomationRequest, UpdateCustomTargetTypeRequest, UpdateDeliveryPipelineRequest, + UpdateDeployPolicyRequest, UpdateTargetRequest, VerifyJob, VerifyJobRun, + WeeklyWindow, ) from .customtargettype_notification_payload import CustomTargetTypeNotificationEvent from .deliverypipeline_notification_payload import DeliveryPipelineNotificationEvent +from .deploypolicy_evaluation_payload import DeployPolicyEvaluationEvent from .deploypolicy_notification_payload import DeployPolicyNotificationEvent from .jobrun_notification_payload import JobRunNotificationEvent from .log_enums import Type @@ -206,6 +223,7 @@ "CreateChildRolloutJobRun", "CreateCustomTargetTypeRequest", "CreateDeliveryPipelineRequest", + "CreateDeployPolicyRequest", "CreateReleaseRequest", "CreateRolloutRequest", "CreateTargetRequest", @@ -219,20 +237,25 @@ "DeleteAutomationRequest", "DeleteCustomTargetTypeRequest", "DeleteDeliveryPipelineRequest", + "DeleteDeployPolicyRequest", "DeleteTargetRequest", "DeliveryPipeline", + "DeliveryPipelineAttribute", "DeployArtifact", "DeployJob", "DeployJobRun", "DeployJobRunMetadata", "DeploymentJobs", "DeployParameters", + "DeployPolicy", + "DeployPolicyResourceSelector", "ExecutionConfig", "GetAutomationRequest", "GetAutomationRunRequest", "GetConfigRequest", "GetCustomTargetTypeRequest", "GetDeliveryPipelineRequest", + "GetDeployPolicyRequest", "GetJobRunRequest", "GetReleaseRequest", "GetRolloutRequest", @@ -251,6 +274,8 @@ "ListCustomTargetTypesResponse", "ListDeliveryPipelinesRequest", "ListDeliveryPipelinesResponse", + "ListDeployPoliciesRequest", + "ListDeployPoliciesResponse", "ListJobRunsRequest", "ListJobRunsResponse", "ListReleasesRequest", @@ -261,10 +286,14 @@ "ListTargetsResponse", "Metadata", "MultiTarget", + "OneTimeWindow", "OperationMetadata", "Phase", "PipelineCondition", "PipelineReadyCondition", + "PolicyRule", + "PolicyViolation", + "PolicyViolationDetails", "Postdeploy", "PostdeployJob", 
"PostdeployJobRun", @@ -288,6 +317,7 @@ "RollbackTargetRequest", "RollbackTargetResponse", "Rollout", + "RolloutRestriction", "RuntimeConfig", "SerialPipeline", "SkaffoldModules", @@ -302,17 +332,21 @@ "TargetsTypeCondition", "TerminateJobRunRequest", "TerminateJobRunResponse", + "TimeWindows", "UpdateAutomationRequest", "UpdateCustomTargetTypeRequest", "UpdateDeliveryPipelineRequest", + "UpdateDeployPolicyRequest", "UpdateTargetRequest", "VerifyJob", "VerifyJobRun", + "WeeklyWindow", "BackoffMode", "RepairState", "SkaffoldSupportState", "CustomTargetTypeNotificationEvent", "DeliveryPipelineNotificationEvent", + "DeployPolicyEvaluationEvent", "DeployPolicyNotificationEvent", "JobRunNotificationEvent", "Type", diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py index c7d9e95ecf4f..809363dd9595 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py @@ -21,6 +21,8 @@ from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -80,8 +82,24 @@ "CreateCustomTargetTypeRequest", "UpdateCustomTargetTypeRequest", "DeleteCustomTargetTypeRequest", + "DeployPolicy", + "DeployPolicyResourceSelector", + "DeliveryPipelineAttribute", "TargetAttribute", + "PolicyRule", + "RolloutRestriction", + "TimeWindows", + "OneTimeWindow", + "WeeklyWindow", + "PolicyViolation", + "PolicyViolationDetails", "Release", + "CreateDeployPolicyRequest", + "UpdateDeployPolicyRequest", + "DeleteDeployPolicyRequest", + "ListDeployPoliciesRequest", + "ListDeployPoliciesResponse", + "GetDeployPolicyRequest", 
"BuildArtifact", "TargetArtifact", "DeployArtifact", @@ -1387,6 +1405,9 @@ class RollbackTargetRequest(proto.Message): validate_only (bool): Optional. If set to true, the request is validated and the user is provided with a ``RollbackTargetResponse``. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deploy_policy}``. """ name: str = proto.Field( @@ -1418,6 +1439,10 @@ class RollbackTargetRequest(proto.Message): proto.BOOL, number=7, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=9, + ) class RollbackTargetResponse(proto.Message): @@ -2398,7 +2423,7 @@ class SkaffoldGCSSource(proto.Message): Attributes: source (str): Required. Cloud Storage source paths to copy recursively. - For example, providing "gs://my-bucket/dir/configs/*" will + For example, providing ``"gs://my-bucket/dir/configs/*"`` will result in Skaffold copying all files within the "dir/configs" directory in the bucket "my-bucket". path (str): @@ -2761,8 +2786,212 @@ class DeleteCustomTargetTypeRequest(proto.Message): ) +class DeployPolicy(proto.Message): + r"""A ``DeployPolicy`` resource in the Cloud Deploy API. + + A ``DeployPolicy`` inhibits manual or automation-driven actions + within a Delivery Pipeline or Target. + + Attributes: + name (str): + Output only. Name of the ``DeployPolicy``. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. + The ``deployPolicy`` component must match + ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?`` + uid (str): + Output only. Unique identifier of the ``DeployPolicy``. + description (str): + Description of the ``DeployPolicy``. Max length is 255 + characters. + annotations (MutableMapping[str, str]): + User annotations. These attributes can only be set and used + by the user, and not by Cloud Deploy. 
Annotations must meet + the following constraints: + + - Annotations are key/value pairs. + - Valid annotation keys have two segments: an optional + prefix and name, separated by a slash (``/``). + - The name segment is required and must be 63 characters or + less, beginning and ending with an alphanumeric character + (``[a-z0-9A-Z]``) with dashes (``-``), underscores + (``_``), dots (``.``), and alphanumerics between. + - The prefix is optional. If specified, the prefix must be + a DNS subdomain: a series of DNS labels separated by + dots(\ ``.``), not longer than 253 characters in total, + followed by a slash (``/``). + + See + https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/#syntax-and-character-set + for more details. + labels (MutableMapping[str, str]): + Labels are attributes that can be set and used by both the + user and by Cloud Deploy. Labels must meet the following + constraints: + + - Keys and values can contain only lowercase letters, + numeric characters, underscores, and dashes. + - All characters must use UTF-8 encoding, and international + characters are allowed. + - Keys must start with a lowercase letter or international + character. + - Each resource is limited to a maximum of 64 labels. + + Both keys and values are additionally constrained to be <= + 128 bytes. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time at which the deploy policy + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Most recent time at which the + deploy policy was updated. + suspended (bool): + When suspended, the policy will not prevent + actions from occurring, even if the action + violates the policy. + selectors (MutableSequence[google.cloud.deploy_v1.types.DeployPolicyResourceSelector]): + Required. Selected resources to which the + policy will be applied. At least one selector is + required. If one selector matches the resource + the policy applies. 
For example, if there are + two selectors and the action being attempted + matches one of them, the policy will apply to + that action. + rules (MutableSequence[google.cloud.deploy_v1.types.PolicyRule]): + Required. Rules to apply. At least one rule + must be present. + etag (str): + The weak etag of the ``Automation`` resource. This checksum + is computed by the server based on the value of other + fields, and may be sent on update and delete requests to + ensure the client has an up-to-date value before proceeding. + """ + + class Invoker(proto.Enum): + r"""What invoked the action. Filters enforcing the policy + depending on what invoked the action. + + Values: + INVOKER_UNSPECIFIED (0): + Unspecified. + USER (1): + The action is user-driven. For example, + creating a rollout manually via a gcloud create + command. + DEPLOY_AUTOMATION (2): + Automated action by Cloud Deploy. + """ + INVOKER_UNSPECIFIED = 0 + USER = 1 + DEPLOY_AUTOMATION = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + suspended: bool = proto.Field( + proto.BOOL, + number=8, + ) + selectors: MutableSequence["DeployPolicyResourceSelector"] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message="DeployPolicyResourceSelector", + ) + rules: MutableSequence["PolicyRule"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="PolicyRule", + ) + etag: str = proto.Field( + proto.STRING, + number=11, + ) + + 
+class DeployPolicyResourceSelector(proto.Message): + r"""Contains information on the resources to select for a deploy + policy. Attributes provided must all match the resource in order + for policy restrictions to apply. For example, if delivery + pipelines attributes given are an id "prod" and labels "foo: + bar", a delivery pipeline resource must match both that id and + have that label in order to be subject to the policy. + + Attributes: + delivery_pipeline (google.cloud.deploy_v1.types.DeliveryPipelineAttribute): + Optional. Contains attributes about a + delivery pipeline. + target (google.cloud.deploy_v1.types.TargetAttribute): + Optional. Contains attributes about a target. + """ + + delivery_pipeline: "DeliveryPipelineAttribute" = proto.Field( + proto.MESSAGE, + number=1, + message="DeliveryPipelineAttribute", + ) + target: "TargetAttribute" = proto.Field( + proto.MESSAGE, + number=2, + message="TargetAttribute", + ) + + +class DeliveryPipelineAttribute(proto.Message): + r"""Contains criteria for selecting DeliveryPipelines. + + Attributes: + id (str): + ID of the ``DeliveryPipeline``. The value of this field + could be one of the following: + + - The last segment of a pipeline name + - "*", all delivery pipelines in a location + labels (MutableMapping[str, str]): + DeliveryPipeline labels. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + + class TargetAttribute(proto.Message): - r"""Contains criteria for selecting Targets. + r"""Contains criteria for selecting Targets. This could be used + to select targets for a Deploy Policy or for an Automation. Attributes: id (str): @@ -2786,6 +3015,262 @@ class TargetAttribute(proto.Message): ) +class PolicyRule(proto.Message): + r"""Deploy Policy rule. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + rollout_restriction (google.cloud.deploy_v1.types.RolloutRestriction): + Rollout restrictions. + + This field is a member of `oneof`_ ``rule``. + """ + + rollout_restriction: "RolloutRestriction" = proto.Field( + proto.MESSAGE, + number=2, + oneof="rule", + message="RolloutRestriction", + ) + + +class RolloutRestriction(proto.Message): + r"""Rollout restrictions. + + Attributes: + id (str): + Required. Restriction rule ID. Required and must be unique + within a DeployPolicy. The format is + ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?``. + invokers (MutableSequence[google.cloud.deploy_v1.types.DeployPolicy.Invoker]): + Optional. What invoked the action. If left + empty, all invoker types will be restricted. + actions (MutableSequence[google.cloud.deploy_v1.types.RolloutRestriction.RolloutActions]): + Optional. Rollout actions to be restricted as + part of the policy. If left empty, all actions + will be restricted. + time_windows (google.cloud.deploy_v1.types.TimeWindows): + Required. Time window within which actions + are restricted. + """ + + class RolloutActions(proto.Enum): + r"""Rollout actions to be restricted as part of the policy. + + Values: + ROLLOUT_ACTIONS_UNSPECIFIED (0): + Unspecified. + ADVANCE (1): + Advance the rollout to the next phase. + APPROVE (2): + Approve the rollout. + CANCEL (3): + Cancel the rollout. + CREATE (4): + Create a rollout. + IGNORE_JOB (5): + Ignore a job result on the rollout. + RETRY_JOB (6): + Retry a job for a rollout. + ROLLBACK (7): + Rollback a rollout. + TERMINATE_JOBRUN (8): + Terminate a jobrun. 
+ """ + ROLLOUT_ACTIONS_UNSPECIFIED = 0 + ADVANCE = 1 + APPROVE = 2 + CANCEL = 3 + CREATE = 4 + IGNORE_JOB = 5 + RETRY_JOB = 6 + ROLLBACK = 7 + TERMINATE_JOBRUN = 8 + + id: str = proto.Field( + proto.STRING, + number=1, + ) + invokers: MutableSequence["DeployPolicy.Invoker"] = proto.RepeatedField( + proto.ENUM, + number=2, + enum="DeployPolicy.Invoker", + ) + actions: MutableSequence[RolloutActions] = proto.RepeatedField( + proto.ENUM, + number=3, + enum=RolloutActions, + ) + time_windows: "TimeWindows" = proto.Field( + proto.MESSAGE, + number=4, + message="TimeWindows", + ) + + +class TimeWindows(proto.Message): + r"""Time windows within which actions are restricted. See the + `documentation `__ + for more information on how to configure dates/times. + + Attributes: + time_zone (str): + Required. The time zone in IANA format `IANA Time Zone + Database `__ (e.g. + America/New_York). + one_time_windows (MutableSequence[google.cloud.deploy_v1.types.OneTimeWindow]): + Optional. One-time windows within which + actions are restricted. + weekly_windows (MutableSequence[google.cloud.deploy_v1.types.WeeklyWindow]): + Optional. Recurring weekly windows within + which actions are restricted. + """ + + time_zone: str = proto.Field( + proto.STRING, + number=1, + ) + one_time_windows: MutableSequence["OneTimeWindow"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="OneTimeWindow", + ) + weekly_windows: MutableSequence["WeeklyWindow"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="WeeklyWindow", + ) + + +class OneTimeWindow(proto.Message): + r"""One-time window within which actions are restricted. For + example, blocking actions over New Year's Eve from December 31st + at 5pm to January 1st at 9am. + + Attributes: + start_date (google.type.date_pb2.Date): + Required. Start date. + start_time (google.type.timeofday_pb2.TimeOfDay): + Required. Start time (inclusive). Use 00:00 + for the beginning of the day. 
+ end_date (google.type.date_pb2.Date): + Required. End date. + end_time (google.type.timeofday_pb2.TimeOfDay): + Required. End time (exclusive). You may use + 24:00 for the end of the day. + """ + + start_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=1, + message=date_pb2.Date, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, + ) + end_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=3, + message=date_pb2.Date, + ) + end_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=4, + message=timeofday_pb2.TimeOfDay, + ) + + +class WeeklyWindow(proto.Message): + r"""Weekly windows. For example, blocking actions every Saturday + and Sunday. Another example would be blocking actions every + weekday from 5pm to midnight. + + Attributes: + days_of_week (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): + Optional. Days of week. If left empty, all + days of the week will be included. + start_time (google.type.timeofday_pb2.TimeOfDay): + Optional. Start time (inclusive). Use 00:00 for the + beginning of the day. If you specify start_time you must + also specify end_time. If left empty, this will block for + the entire day for the days specified in days_of_week. + end_time (google.type.timeofday_pb2.TimeOfDay): + Optional. End time (exclusive). Use 24:00 to indicate + midnight. If you specify end_time you must also specify + start_time. If left empty, this will block for the entire + day for the days specified in days_of_week. 
+ """ + + days_of_week: MutableSequence[dayofweek_pb2.DayOfWeek] = proto.RepeatedField( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, + ) + end_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=3, + message=timeofday_pb2.TimeOfDay, + ) + + +class PolicyViolation(proto.Message): + r"""Returned from an action if one or more policies were + violated, and therefore the action was prevented. Contains + information about what policies were violated and why. + + Attributes: + policy_violation_details (MutableSequence[google.cloud.deploy_v1.types.PolicyViolationDetails]): + Policy violation details. + """ + + policy_violation_details: MutableSequence[ + "PolicyViolationDetails" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="PolicyViolationDetails", + ) + + +class PolicyViolationDetails(proto.Message): + r"""Policy violation details. + + Attributes: + policy (str): + Name of the policy that was violated. Policy resource will + be in the format of + ``projects/{project}/locations/{location}/policies/{policy}``. + rule_id (str): + Id of the rule that triggered the policy + violation. + failure_message (str): + User readable message about why the request + violated a policy. This is not intended for + machine parsing. + """ + + policy: str = proto.Field( + proto.STRING, + number=1, + ) + rule_id: str = proto.Field( + proto.STRING, + number=2, + ) + failure_message: str = proto.Field( + proto.STRING, + number=3, + ) + + class Release(proto.Message): r"""A ``Release`` resource in the Cloud Deploy API. @@ -3084,124 +3569,417 @@ class ReleaseCondition(proto.Message): release's Skaffold version. 
""" - release_ready_condition: "Release.ReleaseReadyCondition" = proto.Field( - proto.MESSAGE, - number=1, - message="Release.ReleaseReadyCondition", - ) - skaffold_supported_condition: "Release.SkaffoldSupportedCondition" = ( - proto.Field( - proto.MESSAGE, - number=2, - message="Release.SkaffoldSupportedCondition", - ) - ) + release_ready_condition: "Release.ReleaseReadyCondition" = proto.Field( + proto.MESSAGE, + number=1, + message="Release.ReleaseReadyCondition", + ) + skaffold_supported_condition: "Release.SkaffoldSupportedCondition" = ( + proto.Field( + proto.MESSAGE, + number=2, + message="Release.SkaffoldSupportedCondition", + ) + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + abandoned: bool = proto.Field( + proto.BOOL, + number=23, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + render_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + render_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + skaffold_config_uri: str = proto.Field( + proto.STRING, + number=17, + ) + skaffold_config_path: str = proto.Field( + proto.STRING, + number=9, + ) + build_artifacts: MutableSequence["BuildArtifact"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="BuildArtifact", + ) + delivery_pipeline_snapshot: "DeliveryPipeline" = proto.Field( + proto.MESSAGE, + number=11, + message="DeliveryPipeline", + ) + target_snapshots: MutableSequence["Target"] = proto.RepeatedField( + proto.MESSAGE, + number=12, + 
message="Target", + ) + custom_target_type_snapshots: MutableSequence[ + "CustomTargetType" + ] = proto.RepeatedField( + proto.MESSAGE, + number=27, + message="CustomTargetType", + ) + render_state: RenderState = proto.Field( + proto.ENUM, + number=13, + enum=RenderState, + ) + etag: str = proto.Field( + proto.STRING, + number=16, + ) + skaffold_version: str = proto.Field( + proto.STRING, + number=19, + ) + target_artifacts: MutableMapping[str, "TargetArtifact"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=20, + message="TargetArtifact", + ) + target_renders: MutableMapping[str, TargetRender] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=22, + message=TargetRender, + ) + condition: ReleaseCondition = proto.Field( + proto.MESSAGE, + number=24, + message=ReleaseCondition, + ) + deploy_parameters: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=25, + ) + + +class CreateDeployPolicyRequest(proto.Message): + r"""The request object for ``CreateDeployPolicy``. + + Attributes: + parent (str): + Required. The parent collection in which the + ``DeployPolicy`` must be created. The format is + ``projects/{project_id}/locations/{location_name}``. + deploy_policy_id (str): + Required. ID of the ``DeployPolicy``. + deploy_policy (google.cloud.deploy_v1.types.DeployPolicy): + Required. The ``DeployPolicy`` to create. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. 
This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set to true, the request is + validated and the user is provided with an + expected result, but no actual change is made. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + deploy_policy_id: str = proto.Field( + proto.STRING, + number=2, + ) + deploy_policy: "DeployPolicy" = proto.Field( + proto.MESSAGE, + number=3, + message="DeployPolicy", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class UpdateDeployPolicyRequest(proto.Message): + r"""The request object for ``UpdateDeployPolicy``. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten by the update in the ``DeployPolicy`` resource. + The fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it's in the mask. If the user doesn't provide a mask then + all fields are overwritten. + deploy_policy (google.cloud.deploy_v1.types.DeployPolicy): + Required. The ``DeployPolicy`` to update. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. 
This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + allow_missing (bool): + Optional. If set to true, updating a ``DeployPolicy`` that + does not exist will result in the creation of a new + ``DeployPolicy``. + validate_only (bool): + Optional. If set to true, the request is + validated and the user is provided with an + expected result, but no actual change is made. + """ - name: str = proto.Field( - proto.STRING, + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, number=1, + message=field_mask_pb2.FieldMask, ) - uid: str = proto.Field( - proto.STRING, + deploy_policy: "DeployPolicy" = proto.Field( + proto.MESSAGE, number=2, + message="DeployPolicy", ) - description: str = proto.Field( + request_id: str = proto.Field( proto.STRING, number=3, ) - annotations: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, + allow_missing: bool = proto.Field( + proto.BOOL, number=4, ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - abandoned: bool = proto.Field( + validate_only: bool = proto.Field( proto.BOOL, - number=23, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - render_start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - render_end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, + number=5, ) - skaffold_config_uri: str = proto.Field( + + +class DeleteDeployPolicyRequest(proto.Message): + r"""The request object for ``DeleteDeployPolicy``. + + Attributes: + name (str): + Required. The name of the ``DeployPolicy`` to delete. 
The + format is + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + allow_missing (bool): + Optional. If set to true, then deleting an already deleted + or non-existing ``DeployPolicy`` will succeed. + validate_only (bool): + Optional. If set, validate the request and + preview the review, but do not actually post it. + etag (str): + Optional. This checksum is computed by the + server based on the value of other fields, and + may be sent on update and delete requests to + ensure the client has an up-to-date value before + proceeding. 
+ """ + + name: str = proto.Field( proto.STRING, - number=17, + number=1, ) - skaffold_config_path: str = proto.Field( + request_id: str = proto.Field( proto.STRING, - number=9, + number=2, ) - build_artifacts: MutableSequence["BuildArtifact"] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message="BuildArtifact", + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, ) - delivery_pipeline_snapshot: "DeliveryPipeline" = proto.Field( - proto.MESSAGE, - number=11, - message="DeliveryPipeline", + validate_only: bool = proto.Field( + proto.BOOL, + number=4, ) - target_snapshots: MutableSequence["Target"] = proto.RepeatedField( - proto.MESSAGE, - number=12, - message="Target", + etag: str = proto.Field( + proto.STRING, + number=5, ) - custom_target_type_snapshots: MutableSequence[ - "CustomTargetType" - ] = proto.RepeatedField( - proto.MESSAGE, - number=27, - message="CustomTargetType", + + +class ListDeployPoliciesRequest(proto.Message): + r"""The request object for ``ListDeployPolicies``. + + Attributes: + parent (str): + Required. The parent, which owns this collection of deploy + policies. Format must be + ``projects/{project_id}/locations/{location_name}``. + page_size (int): + The maximum number of deploy policies to + return. The service may return fewer than this + value. If unspecified, at most 50 deploy + policies will be returned. The maximum value is + 1000; values above 1000 will be set to 1000. + page_token (str): + A page token, received from a previous + ``ListDeployPolicies`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other provided parameters match the + call that provided the page token. + filter (str): + Filter deploy policies to be returned. See + https://google.aip.dev/160 for more details. All + fields can be used in the filter. + order_by (str): + Field to sort by. See + https://google.aip.dev/132#ordering for more + details. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, ) - render_state: RenderState = proto.Field( - proto.ENUM, - number=13, - enum=RenderState, + page_size: int = proto.Field( + proto.INT32, + number=2, ) - etag: str = proto.Field( + page_token: str = proto.Field( proto.STRING, - number=16, + number=3, ) - skaffold_version: str = proto.Field( + filter: str = proto.Field( proto.STRING, - number=19, + number=4, ) - target_artifacts: MutableMapping[str, "TargetArtifact"] = proto.MapField( + order_by: str = proto.Field( proto.STRING, - proto.MESSAGE, - number=20, - message="TargetArtifact", + number=5, ) - target_renders: MutableMapping[str, TargetRender] = proto.MapField( - proto.STRING, + + +class ListDeployPoliciesResponse(proto.Message): + r"""The response object from ``ListDeployPolicies``. + + Attributes: + deploy_policies (MutableSequence[google.cloud.deploy_v1.types.DeployPolicy]): + The ``DeployPolicy`` objects. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + deploy_policies: MutableSequence["DeployPolicy"] = proto.RepeatedField( proto.MESSAGE, - number=22, - message=TargetRender, + number=1, + message="DeployPolicy", ) - condition: ReleaseCondition = proto.Field( - proto.MESSAGE, - number=24, - message=ReleaseCondition, + next_page_token: str = proto.Field( + proto.STRING, + number=2, ) - deploy_parameters: MutableMapping[str, str] = proto.MapField( + unreachable: MutableSequence[str] = proto.RepeatedField( proto.STRING, + number=3, + ) + + +class GetDeployPolicyRequest(proto.Message): + r"""The request object for ``GetDeployPolicy`` + + Attributes: + name (str): + Required. Name of the ``DeployPolicy``. 
Format must be + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + """ + + name: str = proto.Field( proto.STRING, - number=25, + number=1, ) @@ -3504,6 +4282,9 @@ class CreateReleaseRequest(proto.Message): Optional. If set to true, the request is validated and the user is provided with an expected result, but no actual change is made. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ parent: str = proto.Field( @@ -3527,6 +4308,10 @@ class CreateReleaseRequest(proto.Message): proto.BOOL, number=5, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) class Rollout(proto.Message): @@ -4457,6 +5242,9 @@ class CreateRolloutRequest(proto.Message): Optional. If set to true, the request is validated and the user is provided with an expected result, but no actual change is made. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. starting_phase_id (str): Optional. The starting phase ID for the ``Rollout``. If empty the ``Rollout`` will start at the first phase. @@ -4483,6 +5271,10 @@ class CreateRolloutRequest(proto.Message): proto.BOOL, number=5, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) starting_phase_id: str = proto.Field( proto.STRING, number=7, @@ -4560,6 +5352,9 @@ class ApproveRolloutRequest(proto.Message): ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/releases/{release}/rollouts/{rollout}``. approved (bool): Required. True = approve; false = reject + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. 
""" name: str = proto.Field( @@ -4570,6 +5365,10 @@ class ApproveRolloutRequest(proto.Message): proto.BOOL, number=2, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class ApproveRolloutResponse(proto.Message): @@ -4585,6 +5384,9 @@ class AdvanceRolloutRequest(proto.Message): ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/releases/{release}/rollouts/{rollout}``. phase_id (str): Required. The phase ID to advance the ``Rollout`` to. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ name: str = proto.Field( @@ -4595,6 +5397,10 @@ class AdvanceRolloutRequest(proto.Message): proto.STRING, number=2, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class AdvanceRolloutResponse(proto.Message): @@ -4608,12 +5414,19 @@ class CancelRolloutRequest(proto.Message): name (str): Required. Name of the Rollout. Format is ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/releases/{release}/rollouts/{rollout}``. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ name: str = proto.Field( proto.STRING, number=1, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) class CancelRolloutResponse(proto.Message): @@ -4632,6 +5445,9 @@ class IgnoreJobRequest(proto.Message): belongs to. job_id (str): Required. The job ID for the Job to ignore. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. 
""" rollout: str = proto.Field( @@ -4646,6 +5462,10 @@ class IgnoreJobRequest(proto.Message): proto.STRING, number=3, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) class IgnoreJobResponse(proto.Message): @@ -4664,6 +5484,9 @@ class RetryJobRequest(proto.Message): belongs to. job_id (str): Required. The job ID for the Job to retry. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ rollout: str = proto.Field( @@ -4678,6 +5501,10 @@ class RetryJobRequest(proto.Message): proto.STRING, number=3, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) class RetryJobResponse(proto.Message): @@ -5310,12 +6137,19 @@ class TerminateJobRunRequest(proto.Message): name (str): Required. Name of the ``JobRun``. Format must be ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/releases/{release}/rollouts/{rollout}/jobRuns/{jobRun}``. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ name: str = proto.Field( proto.STRING, number=1, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) class TerminateJobRunResponse(proto.Message): @@ -6095,6 +6929,9 @@ class AutomationRun(proto.Message): Output only. Explains the current state of the ``AutomationRun``. Present only when an explanation is needed. + policy_violation (google.cloud.deploy_v1.types.PolicyViolation): + Output only. Contains information about what policies + prevented the ``AutomationRun`` from proceeding. expire_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time the ``AutomationRun`` expires. 
An ``AutomationRun`` expires after 14 days from its creation @@ -6192,6 +7029,11 @@ class State(proto.Enum): proto.STRING, number=9, ) + policy_violation: "PolicyViolation" = proto.Field( + proto.MESSAGE, + number=10, + message="PolicyViolation", + ) expire_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=11, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/log_enums.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/log_enums.py index 28a732b7b078..853e64e15f60 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/log_enums.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/log_enums.py @@ -46,6 +46,8 @@ class Type(proto.Enum): Resource deleted. TYPE_ROLLOUT_UPDATE (7): Rollout updated. + TYPE_DEPLOY_POLICY_EVALUATION (8): + Deploy Policy evaluation. TYPE_RENDER_STATUES_CHANGE (2): Deprecated: This field is never used. Use release_render log type instead. @@ -57,6 +59,7 @@ class Type(proto.Enum): TYPE_RESTRICTION_VIOLATED = 5 TYPE_RESOURCE_DELETED = 6 TYPE_ROLLOUT_UPDATE = 7 + TYPE_DEPLOY_POLICY_EVALUATION = 8 TYPE_RENDER_STATUES_CHANGE = 2 diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py new file mode 100644 index 000000000000..5c0a67fbf626 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +async def sample_create_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.create_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_async] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py new file mode 100644 index 000000000000..87445443040f --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +def sample_create_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.create_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_sync] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py new file mode 100644 
index 000000000000..6f6545a0350c --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +async def sample_delete_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.DeleteDeployPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_async] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py new file mode 100644 index 000000000000..1c9d7dd414fe --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +def sample_delete_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.DeleteDeployPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_sync] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py new file mode 100644 index 000000000000..a96e7a4309c2 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +async def sample_get_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.GetDeployPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deploy_policy(request=request) + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_async] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py new file mode 100644 index 000000000000..669a50729182 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +def sample_get_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.GetDeployPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_deploy_policy(request=request) + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_sync] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py new file mode 100644 index 000000000000..f3932c8119a1 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeployPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +async def sample_list_deploy_policies(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.ListDeployPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deploy_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_async] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py new file mode 100644 index 000000000000..8c6baf6b8c95 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 
(the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeployPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +def sample_list_deploy_policies(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.ListDeployPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deploy_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_sync] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_async.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_async.py new file mode 100644 index 000000000000..d08ab6a4ded1 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +async def sample_update_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.UpdateDeployPolicyRequest( + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.update_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_async] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_sync.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_sync.py new file mode 100644 index 000000000000..18ab7cd6a8b9 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +def sample_update_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.UpdateDeployPolicyRequest( + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.update_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_sync] diff --git a/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json b/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json index b4f5eeee1a80..dfbc37400a05 100644 --- a/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json +++ b/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json @@ -1355,6 +1355,183 @@ ], "title": "clouddeploy_v1_generated_cloud_deploy_create_delivery_pipeline_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", + "shortName": "CloudDeployAsyncClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.create_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.CreateDeployPolicy", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "CreateDeployPolicy" + }, + "parameters": [ + { 
+ "name": "request", + "type": "google.cloud.deploy_v1.types.CreateDeployPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deploy_policy", + "type": "google.cloud.deploy_v1.types.DeployPolicy" + }, + { + "name": "deploy_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_deploy_policy" + }, + "description": "Sample for CreateDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployClient.create_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.CreateDeployPolicy", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "CreateDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.CreateDeployPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deploy_policy", + 
"type": "google.cloud.deploy_v1.types.DeployPolicy" + }, + { + "name": "deploy_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_deploy_policy" + }, + "description": "Sample for CreateDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py" + }, { "canonical": true, "clientMethod": { @@ -2377,19 +2554,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.delete_target", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.delete_deploy_policy", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteTarget", + "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteDeployPolicy", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "DeleteTarget" + "shortName": "DeleteDeployPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.DeleteTargetRequest" + "type": "google.cloud.deploy_v1.types.DeleteDeployPolicyRequest" }, { "name": "name", @@ -2409,13 +2586,13 @@ } 
], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_target" + "shortName": "delete_deploy_policy" }, - "description": "Sample for DeleteTarget", - "file": "clouddeploy_v1_generated_cloud_deploy_delete_target_async.py", + "description": "Sample for DeleteDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteTarget_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_async", "segments": [ { "end": 55, @@ -2448,7 +2625,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_delete_target_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py" }, { "canonical": true, @@ -2457,19 +2634,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.delete_target", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.delete_deploy_policy", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteTarget", + "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteDeployPolicy", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "DeleteTarget" + "shortName": "DeleteDeployPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.DeleteTargetRequest" + "type": "google.cloud.deploy_v1.types.DeleteDeployPolicyRequest" }, { "name": "name", @@ -2489,13 +2666,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_target" + "shortName": "delete_deploy_policy" }, - "description": "Sample for DeleteTarget", - "file": "clouddeploy_v1_generated_cloud_deploy_delete_target_sync.py", + "description": "Sample for DeleteDeployPolicy", + "file": 
"clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteTarget_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_sync", "segments": [ { "end": 55, @@ -2528,7 +2705,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_delete_target_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py" }, { "canonical": true, @@ -2538,19 +2715,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_automation_run", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.delete_target", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomationRun", + "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteTarget", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "GetAutomationRun" + "shortName": "DeleteTarget" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.GetAutomationRunRequest" + "type": "google.cloud.deploy_v1.types.DeleteTargetRequest" }, { "name": "name", @@ -2569,22 +2746,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.types.AutomationRun", - "shortName": "get_automation_run" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_target" }, - "description": "Sample for GetAutomationRun", - "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_async.py", + "description": "Sample for DeleteTarget", + "file": "clouddeploy_v1_generated_cloud_deploy_delete_target_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomationRun_async", + "regionTag": 
"clouddeploy_v1_generated_CloudDeploy_DeleteTarget_async", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2599,17 +2776,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_delete_target_async.py" }, { "canonical": true, @@ -2618,19 +2795,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.get_automation_run", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.delete_target", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomationRun", + "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteTarget", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "GetAutomationRun" + "shortName": "DeleteTarget" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.GetAutomationRunRequest" + "type": "google.cloud.deploy_v1.types.DeleteTargetRequest" }, { "name": "name", @@ -2649,22 +2826,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.types.AutomationRun", - "shortName": "get_automation_run" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_target" }, - "description": "Sample for GetAutomationRun", - "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_sync.py", + "description": "Sample for DeleteTarget", + "file": "clouddeploy_v1_generated_cloud_deploy_delete_target_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomationRun_sync", + "regionTag": 
"clouddeploy_v1_generated_CloudDeploy_DeleteTarget_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2679,17 +2856,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_delete_target_sync.py" }, { "canonical": true, @@ -2699,19 +2876,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_automation", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_automation_run", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomation", + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomationRun", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "GetAutomation" + "shortName": "GetAutomationRun" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.GetAutomationRequest" + "type": "google.cloud.deploy_v1.types.GetAutomationRunRequest" }, { "name": "name", @@ -2730,14 +2907,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.types.Automation", - "shortName": "get_automation" + "resultType": "google.cloud.deploy_v1.types.AutomationRun", + "shortName": "get_automation_run" }, - "description": "Sample for GetAutomation", - "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_async.py", + "description": "Sample for GetAutomationRun", + "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomation_async", + 
"regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomationRun_async", "segments": [ { "end": 51, @@ -2770,7 +2947,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_async.py" }, { "canonical": true, @@ -2779,14 +2956,175 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.get_automation", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.get_automation_run", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomation", + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomationRun", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "GetAutomation" + "shortName": "GetAutomationRun" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.GetAutomationRunRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.types.AutomationRun", + "shortName": "get_automation_run" + }, + "description": "Sample for GetAutomationRun", + "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomationRun_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", + "shortName": "CloudDeployAsyncClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_automation", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomation", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "GetAutomation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.GetAutomationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.types.Automation", + "shortName": "get_automation" + }, + "description": "Sample for GetAutomation", + "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployClient.get_automation", + 
"method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomation", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "GetAutomation" }, "parameters": [ { @@ -3335,6 +3673,167 @@ ], "title": "clouddeploy_v1_generated_cloud_deploy_get_delivery_pipeline_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", + "shortName": "CloudDeployAsyncClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetDeployPolicy", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "GetDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.GetDeployPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.types.DeployPolicy", + "shortName": "get_deploy_policy" + }, + "description": "Sample for GetDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployClient.get_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetDeployPolicy", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "GetDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.GetDeployPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.types.DeployPolicy", + "shortName": "get_deploy_policy" + }, + "description": "Sample for GetDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py" + }, { "canonical": true, "clientMethod": { @@ -4278,11 +4777,172 @@ "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationRunsPager", "shortName": "list_automation_runs" }, - "description": "Sample for ListAutomationRuns", - "file": 
"clouddeploy_v1_generated_cloud_deploy_list_automation_runs_sync.py", + "description": "Sample for ListAutomationRuns", + "file": "clouddeploy_v1_generated_cloud_deploy_list_automation_runs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomationRuns_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_list_automation_runs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", + "shortName": "CloudDeployAsyncClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_automations", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListAutomations", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "ListAutomations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.ListAutomationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationsAsyncPager", + "shortName": "list_automations" + }, + "description": "Sample for ListAutomations", + "file": "clouddeploy_v1_generated_cloud_deploy_list_automations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"clouddeploy_v1_generated_CloudDeploy_ListAutomations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_list_automations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_automations", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListAutomations", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "ListAutomations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.ListAutomationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationsPager", + "shortName": "list_automations" + }, + "description": "Sample for ListAutomations", + "file": "clouddeploy_v1_generated_cloud_deploy_list_automations_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomationRuns_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomations_sync", "segments": [ { "end": 52, @@ -4315,7 +4975,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_automation_runs_sync.py" + "title": 
"clouddeploy_v1_generated_cloud_deploy_list_automations_sync.py" }, { "canonical": true, @@ -4325,19 +4985,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_automations", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_custom_target_types", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListAutomations", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListCustomTargetTypes", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListAutomations" + "shortName": "ListCustomTargetTypes" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListAutomationsRequest" + "type": "google.cloud.deploy_v1.types.ListCustomTargetTypesRequest" }, { "name": "parent", @@ -4356,14 +5016,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationsAsyncPager", - "shortName": "list_automations" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListCustomTargetTypesAsyncPager", + "shortName": "list_custom_target_types" }, - "description": "Sample for ListAutomations", - "file": "clouddeploy_v1_generated_cloud_deploy_list_automations_async.py", + "description": "Sample for ListCustomTargetTypes", + "file": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomations_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListCustomTargetTypes_async", "segments": [ { "end": 52, @@ -4396,7 +5056,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_automations_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_async.py" }, { "canonical": true, @@ -4405,19 
+5065,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_automations", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_custom_target_types", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListAutomations", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListCustomTargetTypes", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListAutomations" + "shortName": "ListCustomTargetTypes" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListAutomationsRequest" + "type": "google.cloud.deploy_v1.types.ListCustomTargetTypesRequest" }, { "name": "parent", @@ -4436,14 +5096,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationsPager", - "shortName": "list_automations" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListCustomTargetTypesPager", + "shortName": "list_custom_target_types" }, - "description": "Sample for ListAutomations", - "file": "clouddeploy_v1_generated_cloud_deploy_list_automations_sync.py", + "description": "Sample for ListCustomTargetTypes", + "file": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomations_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListCustomTargetTypes_sync", "segments": [ { "end": 52, @@ -4476,7 +5136,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_automations_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_sync.py" }, { "canonical": true, @@ -4486,19 +5146,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": 
"google.cloud.deploy_v1.CloudDeployAsyncClient.list_custom_target_types", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_delivery_pipelines", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListCustomTargetTypes", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeliveryPipelines", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListCustomTargetTypes" + "shortName": "ListDeliveryPipelines" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListCustomTargetTypesRequest" + "type": "google.cloud.deploy_v1.types.ListDeliveryPipelinesRequest" }, { "name": "parent", @@ -4517,14 +5177,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListCustomTargetTypesAsyncPager", - "shortName": "list_custom_target_types" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeliveryPipelinesAsyncPager", + "shortName": "list_delivery_pipelines" }, - "description": "Sample for ListCustomTargetTypes", - "file": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_async.py", + "description": "Sample for ListDeliveryPipelines", + "file": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListCustomTargetTypes_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeliveryPipelines_async", "segments": [ { "end": 52, @@ -4557,7 +5217,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_async.py" }, { "canonical": true, @@ -4566,19 +5226,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": 
"google.cloud.deploy_v1.CloudDeployClient.list_custom_target_types", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_delivery_pipelines", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListCustomTargetTypes", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeliveryPipelines", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListCustomTargetTypes" + "shortName": "ListDeliveryPipelines" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListCustomTargetTypesRequest" + "type": "google.cloud.deploy_v1.types.ListDeliveryPipelinesRequest" }, { "name": "parent", @@ -4597,14 +5257,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListCustomTargetTypesPager", - "shortName": "list_custom_target_types" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeliveryPipelinesPager", + "shortName": "list_delivery_pipelines" }, - "description": "Sample for ListCustomTargetTypes", - "file": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_sync.py", + "description": "Sample for ListDeliveryPipelines", + "file": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListCustomTargetTypes_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeliveryPipelines_sync", "segments": [ { "end": 52, @@ -4637,7 +5297,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_sync.py" }, { "canonical": true, @@ -4647,19 +5307,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": 
"google.cloud.deploy_v1.CloudDeployAsyncClient.list_delivery_pipelines", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_deploy_policies", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeliveryPipelines", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeployPolicies", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListDeliveryPipelines" + "shortName": "ListDeployPolicies" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListDeliveryPipelinesRequest" + "type": "google.cloud.deploy_v1.types.ListDeployPoliciesRequest" }, { "name": "parent", @@ -4678,14 +5338,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeliveryPipelinesAsyncPager", - "shortName": "list_delivery_pipelines" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeployPoliciesAsyncPager", + "shortName": "list_deploy_policies" }, - "description": "Sample for ListDeliveryPipelines", - "file": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_async.py", + "description": "Sample for ListDeployPolicies", + "file": "clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeliveryPipelines_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_async", "segments": [ { "end": 52, @@ -4718,7 +5378,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py" }, { "canonical": true, @@ -4727,19 +5387,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_delivery_pipelines", + "fullName": 
"google.cloud.deploy_v1.CloudDeployClient.list_deploy_policies", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeliveryPipelines", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeployPolicies", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListDeliveryPipelines" + "shortName": "ListDeployPolicies" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListDeliveryPipelinesRequest" + "type": "google.cloud.deploy_v1.types.ListDeployPoliciesRequest" }, { "name": "parent", @@ -4758,14 +5418,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeliveryPipelinesPager", - "shortName": "list_delivery_pipelines" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeployPoliciesPager", + "shortName": "list_deploy_policies" }, - "description": "Sample for ListDeliveryPipelines", - "file": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_sync.py", + "description": "Sample for ListDeployPolicies", + "file": "clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeliveryPipelines_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_sync", "segments": [ { "end": 52, @@ -4798,7 +5458,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py" }, { "canonical": true, @@ -6466,6 +7126,175 @@ ], "title": "clouddeploy_v1_generated_cloud_deploy_update_delivery_pipeline_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", + "shortName": "CloudDeployAsyncClient" + }, + "fullName": 
"google.cloud.deploy_v1.CloudDeployAsyncClient.update_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.UpdateDeployPolicy", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "UpdateDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.UpdateDeployPolicyRequest" + }, + { + "name": "deploy_policy", + "type": "google.cloud.deploy_v1.types.DeployPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_deploy_policy" + }, + "description": "Sample for UpdateDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployClient.update_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.UpdateDeployPolicy", + "service": { + "fullName": 
"google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "UpdateDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.UpdateDeployPolicyRequest" + }, + { + "name": "deploy_policy", + "type": "google.cloud.deploy_v1.types.DeployPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_deploy_policy" + }, + "description": "Sample for UpdateDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-deploy/scripts/fixup_deploy_v1_keywords.py b/packages/google-cloud-deploy/scripts/fixup_deploy_v1_keywords.py index 1a652b4ea5be..10255de70644 100644 --- a/packages/google-cloud-deploy/scripts/fixup_deploy_v1_keywords.py +++ b/packages/google-cloud-deploy/scripts/fixup_deploy_v1_keywords.py @@ -40,44 +40,49 @@ class deployCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 
'abandon_release': ('name', ), - 'advance_rollout': ('name', 'phase_id', ), - 'approve_rollout': ('name', 'approved', ), + 'advance_rollout': ('name', 'phase_id', 'override_deploy_policy', ), + 'approve_rollout': ('name', 'approved', 'override_deploy_policy', ), 'cancel_automation_run': ('name', ), - 'cancel_rollout': ('name', ), + 'cancel_rollout': ('name', 'override_deploy_policy', ), 'create_automation': ('parent', 'automation_id', 'automation', 'request_id', 'validate_only', ), 'create_custom_target_type': ('parent', 'custom_target_type_id', 'custom_target_type', 'request_id', 'validate_only', ), 'create_delivery_pipeline': ('parent', 'delivery_pipeline_id', 'delivery_pipeline', 'request_id', 'validate_only', ), - 'create_release': ('parent', 'release_id', 'release', 'request_id', 'validate_only', ), - 'create_rollout': ('parent', 'rollout_id', 'rollout', 'request_id', 'validate_only', 'starting_phase_id', ), + 'create_deploy_policy': ('parent', 'deploy_policy_id', 'deploy_policy', 'request_id', 'validate_only', ), + 'create_release': ('parent', 'release_id', 'release', 'request_id', 'validate_only', 'override_deploy_policy', ), + 'create_rollout': ('parent', 'rollout_id', 'rollout', 'request_id', 'validate_only', 'override_deploy_policy', 'starting_phase_id', ), 'create_target': ('parent', 'target_id', 'target', 'request_id', 'validate_only', ), 'delete_automation': ('name', 'request_id', 'allow_missing', 'validate_only', 'etag', ), 'delete_custom_target_type': ('name', 'request_id', 'allow_missing', 'validate_only', 'etag', ), 'delete_delivery_pipeline': ('name', 'request_id', 'allow_missing', 'validate_only', 'force', 'etag', ), + 'delete_deploy_policy': ('name', 'request_id', 'allow_missing', 'validate_only', 'etag', ), 'delete_target': ('name', 'request_id', 'allow_missing', 'validate_only', 'etag', ), 'get_automation': ('name', ), 'get_automation_run': ('name', ), 'get_config': ('name', ), 'get_custom_target_type': ('name', ), 'get_delivery_pipeline': 
('name', ), + 'get_deploy_policy': ('name', ), 'get_job_run': ('name', ), 'get_release': ('name', ), 'get_rollout': ('name', ), 'get_target': ('name', ), - 'ignore_job': ('rollout', 'phase_id', 'job_id', ), + 'ignore_job': ('rollout', 'phase_id', 'job_id', 'override_deploy_policy', ), 'list_automation_runs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_automations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_custom_target_types': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_delivery_pipelines': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_deploy_policies': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_job_runs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_releases': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_rollouts': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_targets': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'retry_job': ('rollout', 'phase_id', 'job_id', ), - 'rollback_target': ('name', 'target_id', 'rollout_id', 'release_id', 'rollout_to_roll_back', 'rollback_config', 'validate_only', ), - 'terminate_job_run': ('name', ), + 'retry_job': ('rollout', 'phase_id', 'job_id', 'override_deploy_policy', ), + 'rollback_target': ('name', 'target_id', 'rollout_id', 'release_id', 'rollout_to_roll_back', 'rollback_config', 'validate_only', 'override_deploy_policy', ), + 'terminate_job_run': ('name', 'override_deploy_policy', ), 'update_automation': ('update_mask', 'automation', 'request_id', 'allow_missing', 'validate_only', ), 'update_custom_target_type': ('update_mask', 'custom_target_type', 'request_id', 'allow_missing', 'validate_only', ), 'update_delivery_pipeline': ('update_mask', 'delivery_pipeline', 'request_id', 'allow_missing', 'validate_only', ), + 'update_deploy_policy': ('update_mask', 'deploy_policy', 'request_id', 'allow_missing', 
'validate_only', ), 'update_target': ('update_mask', 'target', 'request_id', 'allow_missing', 'validate_only', ), } diff --git a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py index ff22c1a2c000..b6c62348fc82 100644 --- a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py +++ b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py @@ -53,6 +53,9 @@ from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -9748,11 +9751,11 @@ async def test_abandon_release_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ApproveRolloutRequest, + cloud_deploy.CreateDeployPolicyRequest, dict, ], ) -def test_approve_rollout(request_type, transport: str = "grpc"): +def test_create_deploy_policy(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9763,22 +9766,24 @@ def test_approve_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.ApproveRolloutResponse() - response = client.approve_rollout(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ApproveRolloutRequest() + request = cloud_deploy.CreateDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.ApproveRolloutResponse) + assert isinstance(response, future.Future) -def test_approve_rollout_empty_call(): +def test_create_deploy_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -9787,17 +9792,19 @@ def test_approve_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.approve_rollout() + client.create_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ApproveRolloutRequest() + assert args[0] == cloud_deploy.CreateDeployPolicyRequest() -def test_approve_rollout_non_empty_request_with_auto_populated_field(): +def test_create_deploy_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = CloudDeployClient( @@ -9808,24 +9815,30 @@ def test_approve_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.ApproveRolloutRequest( - name="name_value", + request = cloud_deploy.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.approve_rollout(request=request) + client.create_deploy_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ApproveRolloutRequest( - name="name_value", + assert args[0] == cloud_deploy.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + request_id="request_id_value", ) -def test_approve_rollout_use_cached_wrapped_rpc(): +def test_create_deploy_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9839,21 +9852,30 @@ def test_approve_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.approve_rollout in client._transport._wrapped_methods + assert ( + client._transport.create_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # 
operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.approve_rollout] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_deploy_policy + ] = mock_rpc request = {} - client.approve_rollout(request) + client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.approve_rollout(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9861,7 +9883,7 @@ def test_approve_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_approve_rollout_empty_call_async(): +async def test_create_deploy_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -9870,19 +9892,21 @@ async def test_approve_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ApproveRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.approve_rollout() + response = await client.create_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ApproveRolloutRequest() + assert args[0] == cloud_deploy.CreateDeployPolicyRequest() @pytest.mark.asyncio -async def test_approve_rollout_async_use_cached_wrapped_rpc( +async def test_create_deploy_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9899,7 +9923,7 @@ async def test_approve_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.approve_rollout + client._client._transport.create_deploy_policy in client._client._transport._wrapped_methods ) @@ -9907,16 +9931,21 @@ async def test_approve_rollout_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.approve_rollout + client._client._transport.create_deploy_policy ] = mock_rpc request = {} - await client.approve_rollout(request) + await client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.approve_rollout(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9924,8 +9953,8 @@ async def test_approve_rollout_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_approve_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ApproveRolloutRequest +async def test_create_deploy_policy_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateDeployPolicyRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9937,43 +9966,47 @@ async def test_approve_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ApproveRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.approve_rollout(request) + response = await client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ApproveRolloutRequest() + request = cloud_deploy.CreateDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.ApproveRolloutResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_approve_rollout_async_from_dict(): - await test_approve_rollout_async(request_type=dict) +async def test_create_deploy_policy_async_from_dict(): + await test_create_deploy_policy_async(request_type=dict) -def test_approve_rollout_field_headers(): +def test_create_deploy_policy_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ApproveRolloutRequest() + request = cloud_deploy.CreateDeployPolicyRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: - call.return_value = cloud_deploy.ApproveRolloutResponse() - client.approve_rollout(request) + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -9984,28 +10017,30 @@ def test_approve_rollout_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_approve_rollout_field_headers_async(): +async def test_create_deploy_policy_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.ApproveRolloutRequest() + request = cloud_deploy.CreateDeployPolicyRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ApproveRolloutResponse() + operations_pb2.Operation(name="operations/op") ) - await client.approve_rollout(request) + await client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10016,35 +10051,45 @@ async def test_approve_rollout_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_approve_rollout_flattened(): +def test_create_deploy_policy_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ApproveRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.approve_rollout( - name="name_value", + client.create_deploy_policy( + parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].deploy_policy + mock_val = cloud_deploy.DeployPolicy(name="name_value") + assert arg == mock_val + arg = args[0].deploy_policy_id + mock_val = "deploy_policy_id_value" assert arg == mock_val -def test_approve_rollout_flattened_error(): +def test_create_deploy_policy_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10052,43 +10097,55 @@ def test_approve_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.approve_rollout( - cloud_deploy.ApproveRolloutRequest(), - name="name_value", + client.create_deploy_policy( + cloud_deploy.CreateDeployPolicyRequest(), + parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) @pytest.mark.asyncio -async def test_approve_rollout_flattened_async(): +async def test_create_deploy_policy_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ApproveRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ApproveRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.approve_rollout( - name="name_value", + response = await client.create_deploy_policy( + parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].deploy_policy + mock_val = cloud_deploy.DeployPolicy(name="name_value") + assert arg == mock_val + arg = args[0].deploy_policy_id + mock_val = "deploy_policy_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_approve_rollout_flattened_error_async(): +async def test_create_deploy_policy_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10096,20 +10153,22 @@ async def test_approve_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.approve_rollout( - cloud_deploy.ApproveRolloutRequest(), - name="name_value", + await client.create_deploy_policy( + cloud_deploy.CreateDeployPolicyRequest(), + parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.AdvanceRolloutRequest, + cloud_deploy.UpdateDeployPolicyRequest, dict, ], ) -def test_advance_rollout(request_type, transport: str = "grpc"): +def test_update_deploy_policy(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10120,22 +10179,24 @@ def test_advance_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.AdvanceRolloutResponse() - response = client.advance_rollout(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.AdvanceRolloutRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.AdvanceRolloutResponse) + assert isinstance(response, future.Future) -def test_advance_rollout_empty_call(): +def test_update_deploy_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -10144,17 +10205,19 @@ def test_advance_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.advance_rollout() + client.update_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.AdvanceRolloutRequest() + assert args[0] == cloud_deploy.UpdateDeployPolicyRequest() -def test_advance_rollout_non_empty_request_with_auto_populated_field(): +def test_update_deploy_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -10165,26 +10228,26 @@ def test_advance_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.AdvanceRolloutRequest( - name="name_value", - phase_id="phase_id_value", + request = cloud_deploy.UpdateDeployPolicyRequest( + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.advance_rollout(request=request) + client.update_deploy_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.AdvanceRolloutRequest( - name="name_value", - phase_id="phase_id_value", + assert args[0] == cloud_deploy.UpdateDeployPolicyRequest( + request_id="request_id_value", ) -def test_advance_rollout_use_cached_wrapped_rpc(): +def test_update_deploy_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10198,21 +10261,30 @@ def test_advance_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.advance_rollout in client._transport._wrapped_methods + assert ( + client._transport.update_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.advance_rollout] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_deploy_policy + ] = mock_rpc request = {} - client.advance_rollout(request) + client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.advance_rollout(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10220,7 +10292,7 @@ def test_advance_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_advance_rollout_empty_call_async(): +async def test_update_deploy_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -10229,19 +10301,21 @@ async def test_advance_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AdvanceRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.advance_rollout() + response = await client.update_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.AdvanceRolloutRequest() + assert args[0] == cloud_deploy.UpdateDeployPolicyRequest() @pytest.mark.asyncio -async def test_advance_rollout_async_use_cached_wrapped_rpc( +async def test_update_deploy_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10258,7 +10332,7 @@ async def test_advance_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.advance_rollout + client._client._transport.update_deploy_policy in client._client._transport._wrapped_methods ) @@ -10266,16 +10340,21 @@ async def test_advance_rollout_async_use_cached_wrapped_rpc( 
mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.advance_rollout + client._client._transport.update_deploy_policy ] = mock_rpc request = {} - await client.advance_rollout(request) + await client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.advance_rollout(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10283,8 +10362,8 @@ async def test_advance_rollout_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_advance_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.AdvanceRolloutRequest +async def test_update_deploy_policy_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.UpdateDeployPolicyRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10296,43 +10375,47 @@ async def test_advance_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AdvanceRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.advance_rollout(request) + response = await client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.AdvanceRolloutRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.AdvanceRolloutResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_advance_rollout_async_from_dict(): - await test_advance_rollout_async(request_type=dict) +async def test_update_deploy_policy_async_from_dict(): + await test_update_deploy_policy_async(request_type=dict) -def test_advance_rollout_field_headers(): +def test_update_deploy_policy_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.AdvanceRolloutRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() - request.name = "name_value" + request.deploy_policy.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: - call.return_value = cloud_deploy.AdvanceRolloutResponse() - client.advance_rollout(request) + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -10343,28 +10426,30 @@ def test_advance_rollout_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "deploy_policy.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_advance_rollout_field_headers_async(): +async def test_update_deploy_policy_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.AdvanceRolloutRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() - request.name = "name_value" + request.deploy_policy.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AdvanceRolloutResponse() + operations_pb2.Operation(name="operations/op") ) - await client.advance_rollout(request) + await client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10375,39 +10460,41 @@ async def test_advance_rollout_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "deploy_policy.name=name_value", ) in kw["metadata"] -def test_advance_rollout_flattened(): +def test_update_deploy_policy_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.AdvanceRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.advance_rollout( - name="name_value", - phase_id="phase_id_value", + client.update_deploy_policy( + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].deploy_policy + mock_val = cloud_deploy.DeployPolicy(name="name_value") assert arg == mock_val - arg = args[0].phase_id - mock_val = "phase_id_value" + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_advance_rollout_flattened_error(): +def test_update_deploy_policy_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10415,48 +10502,50 @@ def test_advance_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.advance_rollout( - cloud_deploy.AdvanceRolloutRequest(), - name="name_value", - phase_id="phase_id_value", + client.update_deploy_policy( + cloud_deploy.UpdateDeployPolicyRequest(), + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_advance_rollout_flattened_async(): +async def test_update_deploy_policy_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.AdvanceRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AdvanceRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.advance_rollout( - name="name_value", - phase_id="phase_id_value", + response = await client.update_deploy_policy( + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].deploy_policy + mock_val = cloud_deploy.DeployPolicy(name="name_value") assert arg == mock_val - arg = args[0].phase_id - mock_val = "phase_id_value" + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_advance_rollout_flattened_error_async(): +async def test_update_deploy_policy_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10464,21 +10553,21 @@ async def test_advance_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.advance_rollout( - cloud_deploy.AdvanceRolloutRequest(), - name="name_value", - phase_id="phase_id_value", + await client.update_deploy_policy( + cloud_deploy.UpdateDeployPolicyRequest(), + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CancelRolloutRequest, + cloud_deploy.DeleteDeployPolicyRequest, dict, ], ) -def test_cancel_rollout(request_type, transport: str = "grpc"): +def test_delete_deploy_policy(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10489,22 +10578,24 @@ def test_cancel_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.CancelRolloutResponse() - response = client.cancel_rollout(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.CancelRolloutRequest() + request = cloud_deploy.DeleteDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.CancelRolloutResponse) + assert isinstance(response, future.Future) -def test_cancel_rollout_empty_call(): +def test_delete_deploy_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -10513,17 +10604,19 @@ def test_cancel_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.cancel_rollout() + client.delete_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelRolloutRequest() + assert args[0] == cloud_deploy.DeleteDeployPolicyRequest() -def test_cancel_rollout_non_empty_request_with_auto_populated_field(): +def test_delete_deploy_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = CloudDeployClient( @@ -10534,24 +10627,30 @@ def test_cancel_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.CancelRolloutRequest( + request = cloud_deploy.DeleteDeployPolicyRequest( name="name_value", + request_id="request_id_value", + etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.cancel_rollout(request=request) + client.delete_deploy_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelRolloutRequest( + assert args[0] == cloud_deploy.DeleteDeployPolicyRequest( name="name_value", + request_id="request_id_value", + etag="etag_value", ) -def test_cancel_rollout_use_cached_wrapped_rpc(): +def test_delete_deploy_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10565,21 +10664,30 @@ def test_cancel_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.cancel_rollout in client._transport._wrapped_methods + assert ( + client._transport.delete_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.cancel_rollout] = mock_rpc + client._transport._wrapped_methods[ + client._transport.delete_deploy_policy + ] = mock_rpc request = {} - client.cancel_rollout(request) + client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.cancel_rollout(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10587,7 +10695,7 @@ def test_cancel_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_cancel_rollout_empty_call_async(): +async def test_delete_deploy_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -10596,19 +10704,21 @@ async def test_cancel_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.cancel_rollout() + response = await client.delete_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelRolloutRequest() + assert args[0] == cloud_deploy.DeleteDeployPolicyRequest() @pytest.mark.asyncio -async def test_cancel_rollout_async_use_cached_wrapped_rpc( +async def test_delete_deploy_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10625,7 +10735,7 @@ async def test_cancel_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.cancel_rollout + client._client._transport.delete_deploy_policy in client._client._transport._wrapped_methods ) @@ -10633,16 +10743,21 @@ async def test_cancel_rollout_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.cancel_rollout + client._client._transport.delete_deploy_policy ] = mock_rpc request = {} - await client.cancel_rollout(request) + await client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.cancel_rollout(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10650,8 +10765,8 @@ async def test_cancel_rollout_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_cancel_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.CancelRolloutRequest +async def test_delete_deploy_policy_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.DeleteDeployPolicyRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10663,43 +10778,47 @@ async def test_cancel_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.cancel_rollout(request) + response = await client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.CancelRolloutRequest() + request = cloud_deploy.DeleteDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.CancelRolloutResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_cancel_rollout_async_from_dict(): - await test_cancel_rollout_async(request_type=dict) +async def test_delete_deploy_policy_async_from_dict(): + await test_delete_deploy_policy_async(request_type=dict) -def test_cancel_rollout_field_headers(): +def test_delete_deploy_policy_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CancelRolloutRequest() + request = cloud_deploy.DeleteDeployPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: - call.return_value = cloud_deploy.CancelRolloutResponse() - client.cancel_rollout(request) + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10715,23 +10834,25 @@ def test_cancel_rollout_field_headers(): @pytest.mark.asyncio -async def test_cancel_rollout_field_headers_async(): +async def test_delete_deploy_policy_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CancelRolloutRequest() + request = cloud_deploy.DeleteDeployPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelRolloutResponse() + operations_pb2.Operation(name="operations/op") ) - await client.cancel_rollout(request) + await client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10746,18 +10867,20 @@ async def test_cancel_rollout_field_headers_async(): ) in kw["metadata"] -def test_cancel_rollout_flattened(): +def test_delete_deploy_policy_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_rollout( + client.delete_deploy_policy( name="name_value", ) @@ -10770,7 +10893,7 @@ def test_cancel_rollout_flattened(): assert arg == mock_val -def test_cancel_rollout_flattened_error(): +def test_delete_deploy_policy_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10778,29 +10901,31 @@ def test_cancel_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.cancel_rollout( - cloud_deploy.CancelRolloutRequest(), + client.delete_deploy_policy( + cloud_deploy.DeleteDeployPolicyRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_cancel_rollout_flattened_async(): +async def test_delete_deploy_policy_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_rollout( + response = await client.delete_deploy_policy( name="name_value", ) @@ -10814,7 +10939,7 @@ async def test_cancel_rollout_flattened_async(): @pytest.mark.asyncio -async def test_cancel_rollout_flattened_error_async(): +async def test_delete_deploy_policy_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10822,8 +10947,8 @@ async def test_cancel_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.cancel_rollout( - cloud_deploy.CancelRolloutRequest(), + await client.delete_deploy_policy( + cloud_deploy.DeleteDeployPolicyRequest(), name="name_value", ) @@ -10831,11 +10956,11 @@ async def test_cancel_rollout_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListRolloutsRequest, + cloud_deploy.ListDeployPoliciesRequest, dict, ], ) -def test_list_rollouts(request_type, transport: str = "grpc"): +def test_list_deploy_policies(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10846,27 +10971,29 @@ def test_list_rollouts(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListRolloutsResponse( + call.return_value = cloud_deploy.ListDeployPoliciesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_rollouts(request) + response = client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListRolloutsRequest() + request = cloud_deploy.ListDeployPoliciesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRolloutsPager) + assert isinstance(response, pagers.ListDeployPoliciesPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] -def test_list_rollouts_empty_call(): +def test_list_deploy_policies_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -10875,17 +11002,19 @@ def test_list_rollouts_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_rollouts() + client.list_deploy_policies() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListRolloutsRequest() + assert args[0] == cloud_deploy.ListDeployPoliciesRequest() -def test_list_rollouts_non_empty_request_with_auto_populated_field(): +def test_list_deploy_policies_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -10896,7 +11025,7 @@ def test_list_rollouts_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = cloud_deploy.ListRolloutsRequest( + request = cloud_deploy.ListDeployPoliciesRequest( parent="parent_value", page_token="page_token_value", filter="filter_value", @@ -10904,14 +11033,16 @@ def test_list_rollouts_non_empty_request_with_auto_populated_field(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_rollouts(request=request) + client.list_deploy_policies(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListRolloutsRequest( + assert args[0] == cloud_deploy.ListDeployPoliciesRequest( parent="parent_value", page_token="page_token_value", filter="filter_value", @@ -10919,7 +11050,7 @@ def test_list_rollouts_non_empty_request_with_auto_populated_field(): ) -def test_list_rollouts_use_cached_wrapped_rpc(): +def test_list_deploy_policies_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10933,21 +11064,25 @@ def test_list_rollouts_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_rollouts in client._transport._wrapped_methods + assert ( + client._transport.list_deploy_policies in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.list_rollouts] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_deploy_policies + ] = mock_rpc request = {} - client.list_rollouts(request) + client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_rollouts(request) + client.list_deploy_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10955,7 +11090,7 @@ def test_list_rollouts_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_rollouts_empty_call_async(): +async def test_list_deploy_policies_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -10964,22 +11099,24 @@ async def test_list_rollouts_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListRolloutsResponse( + cloud_deploy.ListDeployPoliciesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) - response = await client.list_rollouts() + response = await client.list_deploy_policies() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListRolloutsRequest() + assert args[0] == cloud_deploy.ListDeployPoliciesRequest() @pytest.mark.asyncio -async def test_list_rollouts_async_use_cached_wrapped_rpc( +async def test_list_deploy_policies_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10996,7 +11133,7 @@ async def test_list_rollouts_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_rollouts + client._client._transport.list_deploy_policies in client._client._transport._wrapped_methods ) @@ -11004,16 +11141,16 @@ async def test_list_rollouts_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_rollouts + client._client._transport.list_deploy_policies ] = mock_rpc request = {} - await client.list_rollouts(request) + await client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.list_rollouts(request) + await client.list_deploy_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11021,8 +11158,8 @@ async def test_list_rollouts_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_rollouts_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ListRolloutsRequest +async def test_list_deploy_policies_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListDeployPoliciesRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11034,48 +11171,52 @@ async def test_list_rollouts_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListRolloutsResponse( + cloud_deploy.ListDeployPoliciesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) - response = await client.list_rollouts(request) + response = await client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListRolloutsRequest() + request = cloud_deploy.ListDeployPoliciesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRolloutsAsyncPager) + assert isinstance(response, pagers.ListDeployPoliciesAsyncPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_list_rollouts_async_from_dict(): - await test_list_rollouts_async(request_type=dict) +async def test_list_deploy_policies_async_from_dict(): + await test_list_deploy_policies_async(request_type=dict) -def test_list_rollouts_field_headers(): +def test_list_deploy_policies_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListRolloutsRequest() + request = cloud_deploy.ListDeployPoliciesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: - call.return_value = cloud_deploy.ListRolloutsResponse() - client.list_rollouts(request) + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: + call.return_value = cloud_deploy.ListDeployPoliciesResponse() + client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11091,23 +11232,25 @@ def test_list_rollouts_field_headers(): @pytest.mark.asyncio -async def test_list_rollouts_field_headers_async(): +async def test_list_deploy_policies_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.ListRolloutsRequest() + request = cloud_deploy.ListDeployPoliciesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListRolloutsResponse() + cloud_deploy.ListDeployPoliciesResponse() ) - await client.list_rollouts(request) + await client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11122,18 +11265,20 @@ async def test_list_rollouts_field_headers_async(): ) in kw["metadata"] -def test_list_rollouts_flattened(): +def test_list_deploy_policies_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListRolloutsResponse() + call.return_value = cloud_deploy.ListDeployPoliciesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_rollouts( + client.list_deploy_policies( parent="parent_value", ) @@ -11146,7 +11291,7 @@ def test_list_rollouts_flattened(): assert arg == mock_val -def test_list_rollouts_flattened_error(): +def test_list_deploy_policies_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11154,29 +11299,31 @@ def test_list_rollouts_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_rollouts( - cloud_deploy.ListRolloutsRequest(), + client.list_deploy_policies( + cloud_deploy.ListDeployPoliciesRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_rollouts_flattened_async(): +async def test_list_deploy_policies_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListRolloutsResponse() + call.return_value = cloud_deploy.ListDeployPoliciesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListRolloutsResponse() + cloud_deploy.ListDeployPoliciesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_rollouts( + response = await client.list_deploy_policies( parent="parent_value", ) @@ -11190,7 +11337,7 @@ async def test_list_rollouts_flattened_async(): @pytest.mark.asyncio -async def test_list_rollouts_flattened_error_async(): +async def test_list_deploy_policies_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11198,44 +11345,46 @@ async def test_list_rollouts_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_rollouts( - cloud_deploy.ListRolloutsRequest(), + await client.list_deploy_policies( + cloud_deploy.ListDeployPoliciesRequest(), parent="parent_value", ) -def test_list_rollouts_pager(transport_name: str = "grpc"): +def test_list_deploy_policies_pager(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], next_page_token="abc", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[], + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], next_page_token="def", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), ], next_page_token="ghi", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], ), RuntimeError, @@ -11247,7 +11396,7 @@ def test_list_rollouts_pager(transport_name: str = "grpc"): expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_rollouts(request={}, retry=retry, timeout=timeout) + pager = client.list_deploy_policies(request={}, retry=retry, 
timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -11255,89 +11404,93 @@ def test_list_rollouts_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.Rollout) for i in results) + assert all(isinstance(i, cloud_deploy.DeployPolicy) for i in results) -def test_list_rollouts_pages(transport_name: str = "grpc"): +def test_list_deploy_policies_pages(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], next_page_token="abc", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[], + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], next_page_token="def", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), ], next_page_token="ghi", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], ), RuntimeError, ) - pages = list(client.list_rollouts(request={}).pages) + pages = list(client.list_deploy_policies(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert 
page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_rollouts_async_pager(): +async def test_list_deploy_policies_async_pager(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_deploy_policies), + "__call__", + new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], next_page_token="abc", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[], + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], next_page_token="def", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), ], next_page_token="ghi", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], ), RuntimeError, ) - async_pager = await client.list_rollouts( + async_pager = await client.list_deploy_policies( request={}, ) assert async_pager.next_page_token == "abc" @@ -11346,43 +11499,45 @@ async def test_list_rollouts_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, cloud_deploy.Rollout) for i in responses) + assert all(isinstance(i, cloud_deploy.DeployPolicy) for i in responses) @pytest.mark.asyncio -async def test_list_rollouts_async_pages(): +async def 
test_list_deploy_policies_async_pages(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_deploy_policies), + "__call__", + new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], next_page_token="abc", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[], + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], next_page_token="def", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), ], next_page_token="ghi", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], ), RuntimeError, @@ -11391,7 +11546,7 @@ async def test_list_rollouts_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_rollouts(request={}) + await client.list_deploy_policies(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -11401,11 +11556,11 @@ async def test_list_rollouts_async_pages(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetRolloutRequest, + 
cloud_deploy.GetDeployPolicyRequest, dict, ], ) -def test_get_rollout(request_type, transport: str = "grpc"): +def test_get_deploy_policy(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11416,52 +11571,35 @@ def test_get_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Rollout( + call.return_value = cloud_deploy.DeployPolicy( name="name_value", uid="uid_value", description="description_value", - target_id="target_id_value", - approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, - state=cloud_deploy.Rollout.State.SUCCEEDED, - failure_reason="failure_reason_value", - deploying_build="deploying_build_value", + suspended=True, etag="etag_value", - deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, - controller_rollout="controller_rollout_value", - rollback_of_rollout="rollback_of_rollout_value", - rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) - response = client.get_rollout(request) + response = client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetRolloutRequest() + request = cloud_deploy.GetDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.Rollout) + assert isinstance(response, cloud_deploy.DeployPolicy) assert response.name == "name_value" assert response.uid == "uid_value" assert response.description == "description_value" - assert response.target_id == "target_id_value" - assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL - assert response.state == cloud_deploy.Rollout.State.SUCCEEDED - assert response.failure_reason == "failure_reason_value" - assert response.deploying_build == "deploying_build_value" + assert response.suspended is True assert response.etag == "etag_value" - assert ( - response.deploy_failure_cause - == cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE - ) - assert response.controller_rollout == "controller_rollout_value" - assert response.rollback_of_rollout == "rollback_of_rollout_value" - assert response.rolled_back_by_rollouts == ["rolled_back_by_rollouts_value"] -def test_get_rollout_empty_call(): +def test_get_deploy_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -11470,17 +11608,19 @@ def test_get_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_rollout() + client.get_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetRolloutRequest() + assert args[0] == cloud_deploy.GetDeployPolicyRequest() -def test_get_rollout_non_empty_request_with_auto_populated_field(): +def test_get_deploy_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -11491,24 +11631,26 @@ def test_get_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.GetRolloutRequest( + request = cloud_deploy.GetDeployPolicyRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_rollout(request=request) + client.get_deploy_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetRolloutRequest( + assert args[0] == cloud_deploy.GetDeployPolicyRequest( name="name_value", ) -def test_get_rollout_use_cached_wrapped_rpc(): +def test_get_deploy_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11522,21 +11664,23 @@ def test_get_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_rollout in client._transport._wrapped_methods + assert client._transport.get_deploy_policy in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_rollout] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_deploy_policy + ] = mock_rpc request = {} - client.get_rollout(request) + client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_rollout(request) + client.get_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11544,7 +11688,7 @@ def test_get_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_rollout_empty_call_async(): +async def test_get_deploy_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudDeployAsyncClient( @@ -11553,33 +11697,27 @@ async def test_get_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Rollout( + cloud_deploy.DeployPolicy( name="name_value", uid="uid_value", description="description_value", - target_id="target_id_value", - approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, - state=cloud_deploy.Rollout.State.SUCCEEDED, - failure_reason="failure_reason_value", - deploying_build="deploying_build_value", + suspended=True, etag="etag_value", - deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, - controller_rollout="controller_rollout_value", - rollback_of_rollout="rollback_of_rollout_value", - rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) ) - response = await client.get_rollout() + response = await client.get_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetRolloutRequest() + assert args[0] == cloud_deploy.GetDeployPolicyRequest() @pytest.mark.asyncio -async def test_get_rollout_async_use_cached_wrapped_rpc( +async def test_get_deploy_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11596,7 +11734,7 @@ async def test_get_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_rollout + client._client._transport.get_deploy_policy in client._client._transport._wrapped_methods ) @@ -11604,16 +11742,16 @@ async def test_get_rollout_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = 
mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_rollout + client._client._transport.get_deploy_policy ] = mock_rpc request = {} - await client.get_rollout(request) + await client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_rollout(request) + await client.get_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11621,8 +11759,8 @@ async def test_get_rollout_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetRolloutRequest +async def test_get_deploy_policy_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetDeployPolicyRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11634,73 +11772,58 @@ async def test_get_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Rollout( + cloud_deploy.DeployPolicy( name="name_value", uid="uid_value", description="description_value", - target_id="target_id_value", - approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, - state=cloud_deploy.Rollout.State.SUCCEEDED, - failure_reason="failure_reason_value", - deploying_build="deploying_build_value", + suspended=True, etag="etag_value", - deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, - controller_rollout="controller_rollout_value", - rollback_of_rollout="rollback_of_rollout_value", - rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) ) - response = await client.get_rollout(request) + response = await client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetRolloutRequest() + request = cloud_deploy.GetDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.Rollout) + assert isinstance(response, cloud_deploy.DeployPolicy) assert response.name == "name_value" assert response.uid == "uid_value" assert response.description == "description_value" - assert response.target_id == "target_id_value" - assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL - assert response.state == cloud_deploy.Rollout.State.SUCCEEDED - assert response.failure_reason == "failure_reason_value" - assert response.deploying_build == "deploying_build_value" + assert response.suspended is True assert response.etag == "etag_value" - assert ( - response.deploy_failure_cause - == cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE - ) - assert response.controller_rollout == "controller_rollout_value" - assert response.rollback_of_rollout == "rollback_of_rollout_value" - assert response.rolled_back_by_rollouts == ["rolled_back_by_rollouts_value"] @pytest.mark.asyncio -async def test_get_rollout_async_from_dict(): - await test_get_rollout_async(request_type=dict) +async def test_get_deploy_policy_async_from_dict(): + await test_get_deploy_policy_async(request_type=dict) -def test_get_rollout_field_headers(): +def test_get_deploy_policy_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetRolloutRequest() + request = cloud_deploy.GetDeployPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: - call.return_value = cloud_deploy.Rollout() - client.get_rollout(request) + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: + call.return_value = cloud_deploy.DeployPolicy() + client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11716,23 +11839,25 @@ def test_get_rollout_field_headers(): @pytest.mark.asyncio -async def test_get_rollout_field_headers_async(): +async def test_get_deploy_policy_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetRolloutRequest() + request = cloud_deploy.GetDeployPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Rollout() + cloud_deploy.DeployPolicy() ) - await client.get_rollout(request) + await client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11747,18 +11872,20 @@ async def test_get_rollout_field_headers_async(): ) in kw["metadata"] -def test_get_rollout_flattened(): +def test_get_deploy_policy_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Rollout() + call.return_value = cloud_deploy.DeployPolicy() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_rollout( + client.get_deploy_policy( name="name_value", ) @@ -11771,7 +11898,7 @@ def test_get_rollout_flattened(): assert arg == mock_val -def test_get_rollout_flattened_error(): +def test_get_deploy_policy_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11779,29 +11906,31 @@ def test_get_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_rollout( - cloud_deploy.GetRolloutRequest(), + client.get_deploy_policy( + cloud_deploy.GetDeployPolicyRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_rollout_flattened_async(): +async def test_get_deploy_policy_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Rollout() + call.return_value = cloud_deploy.DeployPolicy() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Rollout() + cloud_deploy.DeployPolicy() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_rollout( + response = await client.get_deploy_policy( name="name_value", ) @@ -11815,7 +11944,7 @@ async def test_get_rollout_flattened_async(): @pytest.mark.asyncio -async def test_get_rollout_flattened_error_async(): +async def test_get_deploy_policy_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11823,8 +11952,8 @@ async def test_get_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_rollout( - cloud_deploy.GetRolloutRequest(), + await client.get_deploy_policy( + cloud_deploy.GetDeployPolicyRequest(), name="name_value", ) @@ -11832,11 +11961,11 @@ async def test_get_rollout_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateRolloutRequest, + cloud_deploy.ApproveRolloutRequest, dict, ], ) -def test_create_rollout(request_type, transport: str = "grpc"): +def test_approve_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11847,22 +11976,22 @@ def test_create_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_rollout(request) + call.return_value = cloud_deploy.ApproveRolloutResponse() + response = client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.CreateRolloutRequest() + request = cloud_deploy.ApproveRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.ApproveRolloutResponse) -def test_create_rollout_empty_call(): +def test_approve_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -11871,17 +12000,17 @@ def test_create_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_rollout() + client.approve_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateRolloutRequest() + assert args[0] == cloud_deploy.ApproveRolloutRequest() -def test_create_rollout_non_empty_request_with_auto_populated_field(): +def test_approve_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -11892,30 +12021,24 @@ def test_create_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = cloud_deploy.CreateRolloutRequest( - parent="parent_value", - rollout_id="rollout_id_value", - request_id="request_id_value", - starting_phase_id="starting_phase_id_value", + request = cloud_deploy.ApproveRolloutRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_rollout(request=request) + client.approve_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateRolloutRequest( - parent="parent_value", - rollout_id="rollout_id_value", - request_id="request_id_value", - starting_phase_id="starting_phase_id_value", + assert args[0] == cloud_deploy.ApproveRolloutRequest( + name="name_value", ) -def test_create_rollout_use_cached_wrapped_rpc(): +def test_approve_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11929,26 +12052,21 @@ def test_create_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_rollout in client._transport._wrapped_methods + assert client._transport.approve_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.create_rollout] = mock_rpc + client._transport._wrapped_methods[client._transport.approve_rollout] = mock_rpc request = {} - client.create_rollout(request) + client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_rollout(request) + client.approve_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11956,7 +12074,7 @@ def test_create_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_rollout_empty_call_async(): +async def test_approve_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -11965,19 +12083,19 @@ async def test_create_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ApproveRolloutResponse() ) - response = await client.create_rollout() + response = await client.approve_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateRolloutRequest() + assert args[0] == cloud_deploy.ApproveRolloutRequest() @pytest.mark.asyncio -async def test_create_rollout_async_use_cached_wrapped_rpc( +async def test_approve_rollout_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11994,7 +12112,7 @@ async def test_create_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_rollout + client._client._transport.approve_rollout in client._client._transport._wrapped_methods ) @@ -12002,21 +12120,16 @@ async def test_create_rollout_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_rollout + client._client._transport.approve_rollout ] = mock_rpc request = {} - await client.create_rollout(request) + await client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_rollout(request) + await client.approve_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12024,8 +12137,8 @@ async def test_create_rollout_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateRolloutRequest +async def test_approve_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ApproveRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12037,43 +12150,43 @@ async def test_create_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ApproveRolloutResponse() ) - response = await client.create_rollout(request) + response = await client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.CreateRolloutRequest() + request = cloud_deploy.ApproveRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.ApproveRolloutResponse) @pytest.mark.asyncio -async def test_create_rollout_async_from_dict(): - await test_create_rollout_async(request_type=dict) +async def test_approve_rollout_async_from_dict(): + await test_approve_rollout_async(request_type=dict) -def test_create_rollout_field_headers(): +def test_approve_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CreateRolloutRequest() + request = cloud_deploy.ApproveRolloutRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_rollout(request) + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + call.return_value = cloud_deploy.ApproveRolloutResponse() + client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12084,28 +12197,28 @@ def test_create_rollout_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_rollout_field_headers_async(): +async def test_approve_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.CreateRolloutRequest() + request = cloud_deploy.ApproveRolloutRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + cloud_deploy.ApproveRolloutResponse() ) - await client.create_rollout(request) + await client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12116,43 +12229,35 @@ async def test_create_rollout_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_rollout_flattened(): +def test_approve_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.ApproveRolloutResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_rollout( - parent="parent_value", - rollout=cloud_deploy.Rollout(name="name_value"), - rollout_id="rollout_id_value", + client.approve_rollout( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].rollout - mock_val = cloud_deploy.Rollout(name="name_value") - assert arg == mock_val - arg = args[0].rollout_id - mock_val = "rollout_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_rollout_flattened_error(): +def test_approve_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12160,53 +12265,43 @@ def test_create_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_rollout( - cloud_deploy.CreateRolloutRequest(), - parent="parent_value", - rollout=cloud_deploy.Rollout(name="name_value"), - rollout_id="rollout_id_value", + client.approve_rollout( + cloud_deploy.ApproveRolloutRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_rollout_flattened_async(): +async def test_approve_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.ApproveRolloutResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ApproveRolloutResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_rollout( - parent="parent_value", - rollout=cloud_deploy.Rollout(name="name_value"), - rollout_id="rollout_id_value", + response = await client.approve_rollout( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].rollout - mock_val = cloud_deploy.Rollout(name="name_value") - assert arg == mock_val - arg = args[0].rollout_id - mock_val = "rollout_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_rollout_flattened_error_async(): +async def test_approve_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12214,22 +12309,20 @@ async def test_create_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_rollout( - cloud_deploy.CreateRolloutRequest(), - parent="parent_value", - rollout=cloud_deploy.Rollout(name="name_value"), - rollout_id="rollout_id_value", + await client.approve_rollout( + cloud_deploy.ApproveRolloutRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.IgnoreJobRequest, + cloud_deploy.AdvanceRolloutRequest, dict, ], ) -def test_ignore_job(request_type, transport: str = "grpc"): +def test_advance_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12240,22 +12333,22 @@ def test_ignore_job(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.IgnoreJobResponse() - response = client.ignore_job(request) + call.return_value = cloud_deploy.AdvanceRolloutResponse() + response = client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.IgnoreJobRequest() + request = cloud_deploy.AdvanceRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.IgnoreJobResponse) + assert isinstance(response, cloud_deploy.AdvanceRolloutResponse) -def test_ignore_job_empty_call(): +def test_advance_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -12264,17 +12357,17 @@ def test_ignore_job_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.ignore_job() + client.advance_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.IgnoreJobRequest() + assert args[0] == cloud_deploy.AdvanceRolloutRequest() -def test_ignore_job_non_empty_request_with_auto_populated_field(): +def test_advance_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = CloudDeployClient( @@ -12285,28 +12378,26 @@ def test_ignore_job_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.IgnoreJobRequest( - rollout="rollout_value", + request = cloud_deploy.AdvanceRolloutRequest( + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.ignore_job(request=request) + client.advance_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.IgnoreJobRequest( - rollout="rollout_value", + assert args[0] == cloud_deploy.AdvanceRolloutRequest( + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) -def test_ignore_job_use_cached_wrapped_rpc(): +def test_advance_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12320,21 +12411,21 @@ def test_ignore_job_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.ignore_job in client._transport._wrapped_methods + assert client._transport.advance_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.ignore_job] = mock_rpc + client._transport._wrapped_methods[client._transport.advance_rollout] = mock_rpc request = {} - client.ignore_job(request) + client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.ignore_job(request) + client.advance_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12342,7 +12433,7 @@ def test_ignore_job_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_ignore_job_empty_call_async(): +async def test_advance_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -12351,19 +12442,21 @@ async def test_ignore_job_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.IgnoreJobResponse() + cloud_deploy.AdvanceRolloutResponse() ) - response = await client.ignore_job() + response = await client.advance_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.IgnoreJobRequest() + assert args[0] == cloud_deploy.AdvanceRolloutRequest() @pytest.mark.asyncio -async def test_ignore_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_advance_rollout_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -12378,7 +12471,7 @@ async def test_ignore_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.ignore_job + client._client._transport.advance_rollout in client._client._transport._wrapped_methods ) @@ -12386,16 +12479,16 @@ async def test_ignore_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.ignore_job + client._client._transport.advance_rollout ] = mock_rpc request = {} - await client.ignore_job(request) + await client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.ignore_job(request) + await client.advance_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12403,8 +12496,8 @@ async def test_ignore_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_ignore_job_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.IgnoreJobRequest +async def test_advance_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.AdvanceRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12416,43 +12509,43 @@ async def test_ignore_job_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.IgnoreJobResponse() + cloud_deploy.AdvanceRolloutResponse() ) - response = await client.ignore_job(request) + response = await client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.IgnoreJobRequest() + request = cloud_deploy.AdvanceRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.IgnoreJobResponse) + assert isinstance(response, cloud_deploy.AdvanceRolloutResponse) @pytest.mark.asyncio -async def test_ignore_job_async_from_dict(): - await test_ignore_job_async(request_type=dict) +async def test_advance_rollout_async_from_dict(): + await test_advance_rollout_async(request_type=dict) -def test_ignore_job_field_headers(): +def test_advance_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.IgnoreJobRequest() + request = cloud_deploy.AdvanceRolloutRequest() - request.rollout = "rollout_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: - call.return_value = cloud_deploy.IgnoreJobResponse() - client.ignore_job(request) + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + call.return_value = cloud_deploy.AdvanceRolloutResponse() + client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12463,28 +12556,28 @@ def test_ignore_job_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "rollout=rollout_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_ignore_job_field_headers_async(): +async def test_advance_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.IgnoreJobRequest() + request = cloud_deploy.AdvanceRolloutRequest() - request.rollout = "rollout_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.IgnoreJobResponse() + cloud_deploy.AdvanceRolloutResponse() ) - await client.ignore_job(request) + await client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12495,43 +12588,39 @@ async def test_ignore_job_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "rollout=rollout_value", + "name=name_value", ) in kw["metadata"] -def test_ignore_job_flattened(): +def test_advance_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.IgnoreJobResponse() + call.return_value = cloud_deploy.AdvanceRolloutResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.ignore_job( - rollout="rollout_value", + client.advance_rollout( + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].rollout - mock_val = "rollout_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val arg = args[0].phase_id mock_val = "phase_id_value" assert arg == mock_val - arg = args[0].job_id - mock_val = "job_id_value" - assert arg == mock_val -def test_ignore_job_flattened_error(): +def test_advance_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12539,53 +12628,48 @@ def test_ignore_job_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.ignore_job( - cloud_deploy.IgnoreJobRequest(), - rollout="rollout_value", + client.advance_rollout( + cloud_deploy.AdvanceRolloutRequest(), + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) @pytest.mark.asyncio -async def test_ignore_job_flattened_async(): +async def test_advance_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.IgnoreJobResponse() + call.return_value = cloud_deploy.AdvanceRolloutResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.IgnoreJobResponse() + cloud_deploy.AdvanceRolloutResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.ignore_job( - rollout="rollout_value", + response = await client.advance_rollout( + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].rollout - mock_val = "rollout_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val arg = args[0].phase_id mock_val = "phase_id_value" assert arg == mock_val - arg = args[0].job_id - mock_val = "job_id_value" - assert arg == mock_val @pytest.mark.asyncio -async def test_ignore_job_flattened_error_async(): +async def test_advance_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12593,22 +12677,21 @@ async def test_ignore_job_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.ignore_job( - cloud_deploy.IgnoreJobRequest(), - rollout="rollout_value", + await client.advance_rollout( + cloud_deploy.AdvanceRolloutRequest(), + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.RetryJobRequest, + cloud_deploy.CancelRolloutRequest, dict, ], ) -def test_retry_job(request_type, transport: str = "grpc"): +def test_cancel_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12619,22 +12702,22 @@ def test_retry_job(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.RetryJobResponse() - response = client.retry_job(request) + call.return_value = cloud_deploy.CancelRolloutResponse() + response = client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.RetryJobRequest() + request = cloud_deploy.CancelRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.RetryJobResponse) + assert isinstance(response, cloud_deploy.CancelRolloutResponse) -def test_retry_job_empty_call(): +def test_cancel_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -12643,17 +12726,17 @@ def test_retry_job_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.retry_job() + client.cancel_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.RetryJobRequest() + assert args[0] == cloud_deploy.CancelRolloutRequest() -def test_retry_job_non_empty_request_with_auto_populated_field(): +def test_cancel_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = CloudDeployClient( @@ -12664,28 +12747,24 @@ def test_retry_job_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.RetryJobRequest( - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + request = cloud_deploy.CancelRolloutRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.retry_job(request=request) + client.cancel_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.RetryJobRequest( - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + assert args[0] == cloud_deploy.CancelRolloutRequest( + name="name_value", ) -def test_retry_job_use_cached_wrapped_rpc(): +def test_cancel_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12699,21 +12778,21 @@ def test_retry_job_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.retry_job in client._transport._wrapped_methods + assert client._transport.cancel_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.retry_job] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_rollout] = mock_rpc request = {} - client.retry_job(request) + client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.retry_job(request) + client.cancel_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12721,7 +12800,7 @@ def test_retry_job_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_retry_job_empty_call_async(): +async def test_cancel_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -12730,19 +12809,21 @@ async def test_retry_job_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.RetryJobResponse() + cloud_deploy.CancelRolloutResponse() ) - response = await client.retry_job() + response = await client.cancel_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.RetryJobRequest() + assert args[0] == cloud_deploy.CancelRolloutRequest() @pytest.mark.asyncio -async def test_retry_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_cancel_rollout_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -12757,7 +12838,7 @@ async def test_retry_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asy # Ensure method has been cached assert ( - client._client._transport.retry_job + client._client._transport.cancel_rollout in client._client._transport._wrapped_methods ) @@ -12765,16 +12846,16 @@ async def test_retry_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asy mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.retry_job + client._client._transport.cancel_rollout ] = mock_rpc request = {} - await client.retry_job(request) + await client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.retry_job(request) + await client.cancel_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12782,8 +12863,8 @@ async def test_retry_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asy @pytest.mark.asyncio -async def test_retry_job_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.RetryJobRequest +async def test_cancel_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.CancelRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12795,43 +12876,43 @@ async def test_retry_job_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.RetryJobResponse() + cloud_deploy.CancelRolloutResponse() ) - response = await client.retry_job(request) + response = await client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.RetryJobRequest() + request = cloud_deploy.CancelRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.RetryJobResponse) + assert isinstance(response, cloud_deploy.CancelRolloutResponse) @pytest.mark.asyncio -async def test_retry_job_async_from_dict(): - await test_retry_job_async(request_type=dict) +async def test_cancel_rollout_async_from_dict(): + await test_cancel_rollout_async(request_type=dict) -def test_retry_job_field_headers(): +def test_cancel_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.RetryJobRequest() + request = cloud_deploy.CancelRolloutRequest() - request.rollout = "rollout_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: - call.return_value = cloud_deploy.RetryJobResponse() - client.retry_job(request) + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + call.return_value = cloud_deploy.CancelRolloutResponse() + client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12842,28 +12923,28 @@ def test_retry_job_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "rollout=rollout_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_retry_job_field_headers_async(): +async def test_cancel_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.RetryJobRequest() + request = cloud_deploy.CancelRolloutRequest() - request.rollout = "rollout_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.RetryJobResponse() + cloud_deploy.CancelRolloutResponse() ) - await client.retry_job(request) + await client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12874,43 +12955,35 @@ async def test_retry_job_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "rollout=rollout_value", + "name=name_value", ) in kw["metadata"] -def test_retry_job_flattened(): +def test_cancel_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.RetryJobResponse() + call.return_value = cloud_deploy.CancelRolloutResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.retry_job( - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + client.cancel_rollout( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].rollout - mock_val = "rollout_value" - assert arg == mock_val - arg = args[0].phase_id - mock_val = "phase_id_value" - assert arg == mock_val - arg = args[0].job_id - mock_val = "job_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_retry_job_flattened_error(): +def test_cancel_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12918,53 +12991,43 @@ def test_retry_job_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.retry_job( - cloud_deploy.RetryJobRequest(), - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + client.cancel_rollout( + cloud_deploy.CancelRolloutRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_retry_job_flattened_async(): +async def test_cancel_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.RetryJobResponse() + call.return_value = cloud_deploy.CancelRolloutResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.RetryJobResponse() + cloud_deploy.CancelRolloutResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.retry_job( - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + response = await client.cancel_rollout( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].rollout - mock_val = "rollout_value" - assert arg == mock_val - arg = args[0].phase_id - mock_val = "phase_id_value" - assert arg == mock_val - arg = args[0].job_id - mock_val = "job_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_retry_job_flattened_error_async(): +async def test_cancel_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12972,22 +13035,20 @@ async def test_retry_job_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.retry_job( - cloud_deploy.RetryJobRequest(), - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + await client.cancel_rollout( + cloud_deploy.CancelRolloutRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListJobRunsRequest, + cloud_deploy.ListRolloutsRequest, dict, ], ) -def test_list_job_runs(request_type, transport: str = "grpc"): +def test_list_rollouts(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12998,27 +13059,27 @@ def test_list_job_runs(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListJobRunsResponse( + call.return_value = cloud_deploy.ListRolloutsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_job_runs(request) + response = client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListJobRunsRequest() + request = cloud_deploy.ListRolloutsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobRunsPager) + assert isinstance(response, pagers.ListRolloutsPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] -def test_list_job_runs_empty_call(): +def test_list_rollouts_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -13027,17 +13088,17 @@ def test_list_job_runs_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_job_runs() + client.list_rollouts() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListJobRunsRequest() + assert args[0] == cloud_deploy.ListRolloutsRequest() -def test_list_job_runs_non_empty_request_with_auto_populated_field(): +def test_list_rollouts_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -13048,7 +13109,7 @@ def test_list_job_runs_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.ListJobRunsRequest( + request = cloud_deploy.ListRolloutsRequest( parent="parent_value", page_token="page_token_value", filter="filter_value", @@ -13056,14 +13117,14 @@ def test_list_job_runs_non_empty_request_with_auto_populated_field(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_job_runs(request=request) + client.list_rollouts(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListJobRunsRequest( + assert args[0] == cloud_deploy.ListRolloutsRequest( parent="parent_value", page_token="page_token_value", filter="filter_value", @@ -13071,7 +13132,7 @@ def test_list_job_runs_non_empty_request_with_auto_populated_field(): ) -def test_list_job_runs_use_cached_wrapped_rpc(): +def test_list_rollouts_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13085,21 +13146,21 @@ def test_list_job_runs_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_job_runs in client._transport._wrapped_methods + assert client._transport.list_rollouts in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_job_runs] = mock_rpc + client._transport._wrapped_methods[client._transport.list_rollouts] = mock_rpc request = {} - client.list_job_runs(request) + client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_job_runs(request) + client.list_rollouts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13107,7 +13168,7 @@ def test_list_job_runs_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_job_runs_empty_call_async(): +async def test_list_rollouts_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudDeployAsyncClient( @@ -13116,22 +13177,22 @@ async def test_list_job_runs_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListJobRunsResponse( + cloud_deploy.ListRolloutsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) - response = await client.list_job_runs() + response = await client.list_rollouts() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListJobRunsRequest() + assert args[0] == cloud_deploy.ListRolloutsRequest() @pytest.mark.asyncio -async def test_list_job_runs_async_use_cached_wrapped_rpc( +async def test_list_rollouts_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -13148,7 +13209,7 @@ async def test_list_job_runs_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_job_runs + client._client._transport.list_rollouts in client._client._transport._wrapped_methods ) @@ -13156,16 +13217,16 @@ async def test_list_job_runs_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_job_runs + client._client._transport.list_rollouts ] = mock_rpc request = {} - await client.list_job_runs(request) + await client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.list_job_runs(request) + await client.list_rollouts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13173,8 +13234,8 @@ async def test_list_job_runs_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_job_runs_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ListJobRunsRequest +async def test_list_rollouts_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListRolloutsRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13186,48 +13247,48 @@ async def test_list_job_runs_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListJobRunsResponse( + cloud_deploy.ListRolloutsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) - response = await client.list_job_runs(request) + response = await client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListJobRunsRequest() + request = cloud_deploy.ListRolloutsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListJobRunsAsyncPager) + assert isinstance(response, pagers.ListRolloutsAsyncPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_list_job_runs_async_from_dict(): - await test_list_job_runs_async(request_type=dict) +async def test_list_rollouts_async_from_dict(): + await test_list_rollouts_async(request_type=dict) -def test_list_job_runs_field_headers(): +def test_list_rollouts_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListJobRunsRequest() + request = cloud_deploy.ListRolloutsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: - call.return_value = cloud_deploy.ListJobRunsResponse() - client.list_job_runs(request) + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + call.return_value = cloud_deploy.ListRolloutsResponse() + client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13243,23 +13304,23 @@ def test_list_job_runs_field_headers(): @pytest.mark.asyncio -async def test_list_job_runs_field_headers_async(): +async def test_list_rollouts_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListJobRunsRequest() + request = cloud_deploy.ListRolloutsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListJobRunsResponse() + cloud_deploy.ListRolloutsResponse() ) - await client.list_job_runs(request) + await client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13274,18 +13335,18 @@ async def test_list_job_runs_field_headers_async(): ) in kw["metadata"] -def test_list_job_runs_flattened(): +def test_list_rollouts_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListJobRunsResponse() + call.return_value = cloud_deploy.ListRolloutsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_job_runs( + client.list_rollouts( parent="parent_value", ) @@ -13298,7 +13359,7 @@ def test_list_job_runs_flattened(): assert arg == mock_val -def test_list_job_runs_flattened_error(): +def test_list_rollouts_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13306,29 +13367,29 @@ def test_list_job_runs_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_job_runs( - cloud_deploy.ListJobRunsRequest(), + client.list_rollouts( + cloud_deploy.ListRolloutsRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_job_runs_flattened_async(): +async def test_list_rollouts_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListJobRunsResponse() + call.return_value = cloud_deploy.ListRolloutsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListJobRunsResponse() + cloud_deploy.ListRolloutsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_job_runs( + response = await client.list_rollouts( parent="parent_value", ) @@ -13342,7 +13403,7 @@ async def test_list_job_runs_flattened_async(): @pytest.mark.asyncio -async def test_list_job_runs_flattened_error_async(): +async def test_list_rollouts_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13350,44 +13411,44 @@ async def test_list_job_runs_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_job_runs( - cloud_deploy.ListJobRunsRequest(), + await client.list_rollouts( + cloud_deploy.ListRolloutsRequest(), parent="parent_value", ) -def test_list_job_runs_pager(transport_name: str = "grpc"): +def test_list_rollouts_pager(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], next_page_token="abc", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[], + cloud_deploy.ListRolloutsResponse( + rollouts=[], next_page_token="def", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), ], next_page_token="ghi", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], ), RuntimeError, @@ -13399,7 +13460,7 @@ def test_list_job_runs_pager(transport_name: str = "grpc"): expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_job_runs(request={}, retry=retry, timeout=timeout) + pager = client.list_rollouts(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -13407,89 +13468,89 @@ def 
test_list_job_runs_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.JobRun) for i in results) + assert all(isinstance(i, cloud_deploy.Rollout) for i in results) -def test_list_job_runs_pages(transport_name: str = "grpc"): +def test_list_rollouts_pages(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], next_page_token="abc", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[], + cloud_deploy.ListRolloutsResponse( + rollouts=[], next_page_token="def", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), ], next_page_token="ghi", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], ), RuntimeError, ) - pages = list(client.list_job_runs(request={}).pages) + pages = list(client.list_rollouts(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_job_runs_async_pager(): +async def test_list_rollouts_async_pager(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the 
actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_job_runs), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], next_page_token="abc", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[], + cloud_deploy.ListRolloutsResponse( + rollouts=[], next_page_token="def", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), ], next_page_token="ghi", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], ), RuntimeError, ) - async_pager = await client.list_job_runs( + async_pager = await client.list_rollouts( request={}, ) assert async_pager.next_page_token == "abc" @@ -13498,43 +13559,43 @@ async def test_list_job_runs_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, cloud_deploy.JobRun) for i in responses) + assert all(isinstance(i, cloud_deploy.Rollout) for i in responses) @pytest.mark.asyncio -async def test_list_job_runs_async_pages(): +async def test_list_rollouts_async_pages(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_job_runs), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], next_page_token="abc", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[], + cloud_deploy.ListRolloutsResponse( + rollouts=[], next_page_token="def", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), ], next_page_token="ghi", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], ), RuntimeError, @@ -13543,7 +13604,7 @@ async def test_list_job_runs_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_job_runs(request={}) + await client.list_rollouts(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -13553,11 +13614,11 @@ async def test_list_job_runs_async_pages(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetJobRunRequest, + cloud_deploy.GetRolloutRequest, dict, ], ) -def test_get_job_run(request_type, transport: str = "grpc"): +def test_get_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13568,35 +13629,52 @@ def 
test_get_job_run(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.JobRun( + call.return_value = cloud_deploy.Rollout( name="name_value", uid="uid_value", - phase_id="phase_id_value", - job_id="job_id_value", - state=cloud_deploy.JobRun.State.IN_PROGRESS, + description="description_value", + target_id="target_id_value", + approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, + state=cloud_deploy.Rollout.State.SUCCEEDED, + failure_reason="failure_reason_value", + deploying_build="deploying_build_value", etag="etag_value", + deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, + controller_rollout="controller_rollout_value", + rollback_of_rollout="rollback_of_rollout_value", + rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) - response = client.get_job_run(request) + response = client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetJobRunRequest() + request = cloud_deploy.GetRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.JobRun) + assert isinstance(response, cloud_deploy.Rollout) assert response.name == "name_value" assert response.uid == "uid_value" - assert response.phase_id == "phase_id_value" - assert response.job_id == "job_id_value" - assert response.state == cloud_deploy.JobRun.State.IN_PROGRESS + assert response.description == "description_value" + assert response.target_id == "target_id_value" + assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL + assert response.state == cloud_deploy.Rollout.State.SUCCEEDED + assert response.failure_reason == "failure_reason_value" + assert response.deploying_build == "deploying_build_value" assert response.etag == "etag_value" + assert ( + response.deploy_failure_cause + == cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE + ) + assert response.controller_rollout == "controller_rollout_value" + assert response.rollback_of_rollout == "rollback_of_rollout_value" + assert response.rolled_back_by_rollouts == ["rolled_back_by_rollouts_value"] -def test_get_job_run_empty_call(): +def test_get_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -13605,17 +13683,17 @@ def test_get_job_run_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_job_run() + client.get_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetJobRunRequest() + assert args[0] == cloud_deploy.GetRolloutRequest() -def test_get_job_run_non_empty_request_with_auto_populated_field(): +def test_get_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -13626,24 +13704,24 @@ def test_get_job_run_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.GetJobRunRequest( + request = cloud_deploy.GetRolloutRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_job_run(request=request) + client.get_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetJobRunRequest( + assert args[0] == cloud_deploy.GetRolloutRequest( name="name_value", ) -def test_get_job_run_use_cached_wrapped_rpc(): +def test_get_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13657,21 +13735,21 @@ def test_get_job_run_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_job_run in client._transport._wrapped_methods + assert client._transport.get_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_job_run] = mock_rpc + client._transport._wrapped_methods[client._transport.get_rollout] = mock_rpc request = {} - client.get_job_run(request) + client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_job_run(request) + client.get_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13679,7 +13757,7 @@ def test_get_job_run_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_job_run_empty_call_async(): +async def test_get_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -13688,26 +13766,33 @@ async def test_get_job_run_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.JobRun( + cloud_deploy.Rollout( name="name_value", uid="uid_value", - phase_id="phase_id_value", - job_id="job_id_value", - state=cloud_deploy.JobRun.State.IN_PROGRESS, + description="description_value", + target_id="target_id_value", + approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, + state=cloud_deploy.Rollout.State.SUCCEEDED, + failure_reason="failure_reason_value", + deploying_build="deploying_build_value", etag="etag_value", + deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, + controller_rollout="controller_rollout_value", + rollback_of_rollout="rollback_of_rollout_value", + rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) ) - response = await client.get_job_run() + response = await client.get_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetJobRunRequest() + assert args[0] == cloud_deploy.GetRolloutRequest() @pytest.mark.asyncio -async def test_get_job_run_async_use_cached_wrapped_rpc( +async def test_get_rollout_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -13724,7 +13809,7 @@ async def test_get_job_run_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_job_run + client._client._transport.get_rollout in client._client._transport._wrapped_methods ) @@ -13732,16 +13817,16 @@ async def test_get_job_run_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_job_run + client._client._transport.get_rollout ] 
= mock_rpc request = {} - await client.get_job_run(request) + await client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_job_run(request) + await client.get_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13749,8 +13834,8 @@ async def test_get_job_run_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_job_run_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetJobRunRequest +async def test_get_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13762,56 +13847,73 @@ async def test_get_job_run_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.JobRun( + cloud_deploy.Rollout( name="name_value", uid="uid_value", - phase_id="phase_id_value", - job_id="job_id_value", - state=cloud_deploy.JobRun.State.IN_PROGRESS, + description="description_value", + target_id="target_id_value", + approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, + state=cloud_deploy.Rollout.State.SUCCEEDED, + failure_reason="failure_reason_value", + deploying_build="deploying_build_value", etag="etag_value", + deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, + controller_rollout="controller_rollout_value", + rollback_of_rollout="rollback_of_rollout_value", + rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) ) - response = await client.get_job_run(request) + response = await client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetJobRunRequest() + request = cloud_deploy.GetRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.JobRun) + assert isinstance(response, cloud_deploy.Rollout) assert response.name == "name_value" assert response.uid == "uid_value" - assert response.phase_id == "phase_id_value" - assert response.job_id == "job_id_value" - assert response.state == cloud_deploy.JobRun.State.IN_PROGRESS + assert response.description == "description_value" + assert response.target_id == "target_id_value" + assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL + assert response.state == cloud_deploy.Rollout.State.SUCCEEDED + assert response.failure_reason == "failure_reason_value" + assert response.deploying_build == "deploying_build_value" assert response.etag == "etag_value" + assert ( + response.deploy_failure_cause + == cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE + ) + assert response.controller_rollout == "controller_rollout_value" + assert response.rollback_of_rollout == "rollback_of_rollout_value" + assert response.rolled_back_by_rollouts == ["rolled_back_by_rollouts_value"] @pytest.mark.asyncio -async def test_get_job_run_async_from_dict(): - await test_get_job_run_async(request_type=dict) +async def test_get_rollout_async_from_dict(): + await test_get_rollout_async(request_type=dict) -def test_get_job_run_field_headers(): +def test_get_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetJobRunRequest() + request = cloud_deploy.GetRolloutRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: - call.return_value = cloud_deploy.JobRun() - client.get_job_run(request) + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + call.return_value = cloud_deploy.Rollout() + client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13827,21 +13929,23 @@ def test_get_job_run_field_headers(): @pytest.mark.asyncio -async def test_get_job_run_field_headers_async(): +async def test_get_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetJobRunRequest() + request = cloud_deploy.GetRolloutRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.JobRun()) - await client.get_job_run(request) + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Rollout() + ) + await client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13856,18 +13960,18 @@ async def test_get_job_run_field_headers_async(): ) in kw["metadata"] -def test_get_job_run_flattened(): +def test_get_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.JobRun() + call.return_value = cloud_deploy.Rollout() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_job_run( + client.get_rollout( name="name_value", ) @@ -13880,7 +13984,7 @@ def test_get_job_run_flattened(): assert arg == mock_val -def test_get_job_run_flattened_error(): +def test_get_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13888,27 +13992,29 @@ def test_get_job_run_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_job_run( - cloud_deploy.GetJobRunRequest(), + client.get_rollout( + cloud_deploy.GetRolloutRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_job_run_flattened_async(): +async def test_get_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.JobRun() + call.return_value = cloud_deploy.Rollout() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.JobRun()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Rollout() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_job_run( + response = await client.get_rollout( name="name_value", ) @@ -13922,7 +14028,7 @@ async def test_get_job_run_flattened_async(): @pytest.mark.asyncio -async def test_get_job_run_flattened_error_async(): +async def test_get_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13930,8 +14036,8 @@ async def test_get_job_run_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_job_run( - cloud_deploy.GetJobRunRequest(), + await client.get_rollout( + cloud_deploy.GetRolloutRequest(), name="name_value", ) @@ -13939,11 +14045,11 @@ async def test_get_job_run_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.TerminateJobRunRequest, + cloud_deploy.CreateRolloutRequest, dict, ], ) -def test_terminate_job_run(request_type, transport: str = "grpc"): +def test_create_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13954,24 +14060,22 @@ def test_terminate_job_run(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.TerminateJobRunResponse() - response = client.terminate_job_run(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_rollout(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.TerminateJobRunRequest() + request = cloud_deploy.CreateRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.TerminateJobRunResponse) + assert isinstance(response, future.Future) -def test_terminate_job_run_empty_call(): +def test_create_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -13980,19 +14084,17 @@ def test_terminate_job_run_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.terminate_job_run() + client.create_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.TerminateJobRunRequest() + assert args[0] == cloud_deploy.CreateRolloutRequest() -def test_terminate_job_run_non_empty_request_with_auto_populated_field(): +def test_create_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -14003,26 +14105,30 @@ def test_terminate_job_run_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = cloud_deploy.TerminateJobRunRequest( - name="name_value", + request = cloud_deploy.CreateRolloutRequest( + parent="parent_value", + rollout_id="rollout_id_value", + request_id="request_id_value", + starting_phase_id="starting_phase_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.terminate_job_run(request=request) + client.create_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.TerminateJobRunRequest( - name="name_value", + assert args[0] == cloud_deploy.CreateRolloutRequest( + parent="parent_value", + rollout_id="rollout_id_value", + request_id="request_id_value", + starting_phase_id="starting_phase_id_value", ) -def test_terminate_job_run_use_cached_wrapped_rpc(): +def test_create_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14036,23 +14142,26 @@ def test_terminate_job_run_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.terminate_job_run in client._transport._wrapped_methods + assert client._transport.create_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.terminate_job_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_rollout] = mock_rpc request = {} - client.terminate_job_run(request) + client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.terminate_job_run(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14060,7 +14169,7 @@ def test_terminate_job_run_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_terminate_job_run_empty_call_async(): +async def test_create_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -14069,21 +14178,19 @@ async def test_terminate_job_run_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.TerminateJobRunResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.terminate_job_run() + response = await client.create_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.TerminateJobRunRequest() + assert args[0] == cloud_deploy.CreateRolloutRequest() @pytest.mark.asyncio -async def test_terminate_job_run_async_use_cached_wrapped_rpc( +async def test_create_rollout_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14100,7 +14207,7 @@ async def test_terminate_job_run_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.terminate_job_run + client._client._transport.create_rollout in client._client._transport._wrapped_methods ) @@ -14108,16 +14215,21 @@ async def test_terminate_job_run_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.terminate_job_run + client._client._transport.create_rollout ] = mock_rpc request = {} - await client.terminate_job_run(request) + await client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.terminate_job_run(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14125,8 +14237,8 @@ async def test_terminate_job_run_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_terminate_job_run_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.TerminateJobRunRequest +async def test_create_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14138,47 +14250,43 @@ async def test_terminate_job_run_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.TerminateJobRunResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.terminate_job_run(request) + response = await client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.TerminateJobRunRequest() + request = cloud_deploy.CreateRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.TerminateJobRunResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_terminate_job_run_async_from_dict(): - await test_terminate_job_run_async(request_type=dict) +async def test_create_rollout_async_from_dict(): + await test_create_rollout_async(request_type=dict) -def test_terminate_job_run_field_headers(): +def test_create_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.TerminateJobRunRequest() + request = cloud_deploy.CreateRolloutRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: - call.return_value = cloud_deploy.TerminateJobRunResponse() - client.terminate_job_run(request) + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14189,30 +14297,28 @@ def test_terminate_job_run_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_terminate_job_run_field_headers_async(): +async def test_create_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.TerminateJobRunRequest() + request = cloud_deploy.CreateRolloutRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.TerminateJobRunResponse() + operations_pb2.Operation(name="operations/op") ) - await client.terminate_job_run(request) + await client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14223,37 +14329,43 @@ async def test_terminate_job_run_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_terminate_job_run_flattened(): +def test_create_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.TerminateJobRunResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.terminate_job_run( - name="name_value", + client.create_rollout( + parent="parent_value", + rollout=cloud_deploy.Rollout(name="name_value"), + rollout_id="rollout_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].rollout + mock_val = cloud_deploy.Rollout(name="name_value") + assert arg == mock_val + arg = args[0].rollout_id + mock_val = "rollout_id_value" assert arg == mock_val -def test_terminate_job_run_flattened_error(): +def test_create_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14261,45 +14373,53 @@ def test_terminate_job_run_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.terminate_job_run( - cloud_deploy.TerminateJobRunRequest(), - name="name_value", + client.create_rollout( + cloud_deploy.CreateRolloutRequest(), + parent="parent_value", + rollout=cloud_deploy.Rollout(name="name_value"), + rollout_id="rollout_id_value", ) @pytest.mark.asyncio -async def test_terminate_job_run_flattened_async(): +async def test_create_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.TerminateJobRunResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.TerminateJobRunResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.terminate_job_run( - name="name_value", + response = await client.create_rollout( + parent="parent_value", + rollout=cloud_deploy.Rollout(name="name_value"), + rollout_id="rollout_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].rollout + mock_val = cloud_deploy.Rollout(name="name_value") + assert arg == mock_val + arg = args[0].rollout_id + mock_val = "rollout_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_terminate_job_run_flattened_error_async(): +async def test_create_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14307,20 +14427,22 @@ async def test_terminate_job_run_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.terminate_job_run( - cloud_deploy.TerminateJobRunRequest(), - name="name_value", + await client.create_rollout( + cloud_deploy.CreateRolloutRequest(), + parent="parent_value", + rollout=cloud_deploy.Rollout(name="name_value"), + rollout_id="rollout_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetConfigRequest, + cloud_deploy.IgnoreJobRequest, dict, ], ) -def test_get_config(request_type, transport: str = "grpc"): +def test_ignore_job(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14331,27 +14453,22 @@ def test_get_config(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Config( - name="name_value", - default_skaffold_version="default_skaffold_version_value", - ) - response = client.get_config(request) + call.return_value = cloud_deploy.IgnoreJobResponse() + response = client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetConfigRequest() + request = cloud_deploy.IgnoreJobRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.Config) - assert response.name == "name_value" - assert response.default_skaffold_version == "default_skaffold_version_value" + assert isinstance(response, cloud_deploy.IgnoreJobResponse) -def test_get_config_empty_call(): +def test_ignore_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -14360,17 +14477,17 @@ def test_get_config_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_config() + client.ignore_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetConfigRequest() + assert args[0] == cloud_deploy.IgnoreJobRequest() -def test_get_config_non_empty_request_with_auto_populated_field(): +def test_ignore_job_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -14381,24 +14498,28 @@ def test_get_config_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.GetConfigRequest( - name="name_value", + request = cloud_deploy.IgnoreJobRequest( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_config(request=request) + client.ignore_job(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetConfigRequest( - name="name_value", + assert args[0] == cloud_deploy.IgnoreJobRequest( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) -def test_get_config_use_cached_wrapped_rpc(): +def test_ignore_job_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14412,21 +14533,21 @@ def test_get_config_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_config in client._transport._wrapped_methods + assert client._transport.ignore_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_config] = mock_rpc + client._transport._wrapped_methods[client._transport.ignore_job] = mock_rpc request = {} - client.get_config(request) + client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_config(request) + client.ignore_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14434,7 +14555,7 @@ def test_get_config_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_config_empty_call_async(): +async def test_ignore_job_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudDeployAsyncClient( @@ -14443,22 +14564,19 @@ async def test_get_config_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Config( - name="name_value", - default_skaffold_version="default_skaffold_version_value", - ) + cloud_deploy.IgnoreJobResponse() ) - response = await client.get_config() + response = await client.ignore_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetConfigRequest() + assert args[0] == cloud_deploy.IgnoreJobRequest() @pytest.mark.asyncio -async def test_get_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_ignore_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -14473,7 +14591,7 @@ async def test_get_config_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.get_config + client._client._transport.ignore_job in client._client._transport._wrapped_methods ) @@ -14481,16 +14599,16 @@ async def test_get_config_async_use_cached_wrapped_rpc(transport: str = "grpc_as mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_config + client._client._transport.ignore_job ] = mock_rpc request = {} - await client.get_config(request) + await client.ignore_job(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_config(request) + await client.ignore_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14498,8 +14616,8 @@ async def test_get_config_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_get_config_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetConfigRequest +async def test_ignore_job_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.IgnoreJobRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14511,48 +14629,43 @@ async def test_get_config_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Config( - name="name_value", - default_skaffold_version="default_skaffold_version_value", - ) + cloud_deploy.IgnoreJobResponse() ) - response = await client.get_config(request) + response = await client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetConfigRequest() + request = cloud_deploy.IgnoreJobRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.Config) - assert response.name == "name_value" - assert response.default_skaffold_version == "default_skaffold_version_value" + assert isinstance(response, cloud_deploy.IgnoreJobResponse) @pytest.mark.asyncio -async def test_get_config_async_from_dict(): - await test_get_config_async(request_type=dict) +async def test_ignore_job_async_from_dict(): + await test_ignore_job_async(request_type=dict) -def test_get_config_field_headers(): +def test_ignore_job_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetConfigRequest() + request = cloud_deploy.IgnoreJobRequest() - request.name = "name_value" + request.rollout = "rollout_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: - call.return_value = cloud_deploy.Config() - client.get_config(request) + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + call.return_value = cloud_deploy.IgnoreJobResponse() + client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14563,26 +14676,28 @@ def test_get_config_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "rollout=rollout_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_config_field_headers_async(): +async def test_ignore_job_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.GetConfigRequest() + request = cloud_deploy.IgnoreJobRequest() - request.name = "name_value" + request.rollout = "rollout_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) - await client.get_config(request) + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.IgnoreJobResponse() + ) + await client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14593,35 +14708,43 @@ async def test_get_config_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "rollout=rollout_value", ) in kw["metadata"] -def test_get_config_flattened(): +def test_ignore_job_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Config() + call.return_value = cloud_deploy.IgnoreJobResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_config( - name="name_value", + client.ignore_job( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].rollout + mock_val = "rollout_value" + assert arg == mock_val + arg = args[0].phase_id + mock_val = "phase_id_value" + assert arg == mock_val + arg = args[0].job_id + mock_val = "job_id_value" assert arg == mock_val -def test_get_config_flattened_error(): +def test_ignore_job_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14629,41 +14752,53 @@ def test_get_config_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_config( - cloud_deploy.GetConfigRequest(), - name="name_value", + client.ignore_job( + cloud_deploy.IgnoreJobRequest(), + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) @pytest.mark.asyncio -async def test_get_config_flattened_async(): +async def test_ignore_job_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Config() + call.return_value = cloud_deploy.IgnoreJobResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.IgnoreJobResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_config( - name="name_value", + response = await client.ignore_job( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].rollout + mock_val = "rollout_value" + assert arg == mock_val + arg = args[0].phase_id + mock_val = "phase_id_value" + assert arg == mock_val + arg = args[0].job_id + mock_val = "job_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_config_flattened_error_async(): +async def test_ignore_job_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14671,20 +14806,22 @@ async def test_get_config_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_config( - cloud_deploy.GetConfigRequest(), - name="name_value", + await client.ignore_job( + cloud_deploy.IgnoreJobRequest(), + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateAutomationRequest, + cloud_deploy.RetryJobRequest, dict, ], ) -def test_create_automation(request_type, transport: str = "grpc"): +def test_retry_job(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14695,24 +14832,22 @@ def test_create_automation(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_automation(request) + call.return_value = cloud_deploy.RetryJobResponse() + response = client.retry_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.CreateAutomationRequest() + request = cloud_deploy.RetryJobRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.RetryJobResponse) -def test_create_automation_empty_call(): +def test_retry_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -14721,19 +14856,17 @@ def test_create_automation_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_automation() + client.retry_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateAutomationRequest() + assert args[0] == cloud_deploy.RetryJobRequest() -def test_create_automation_non_empty_request_with_auto_populated_field(): +def test_retry_job_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -14744,30 +14877,28 @@ def test_create_automation_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.CreateAutomationRequest( - parent="parent_value", - automation_id="automation_id_value", - request_id="request_id_value", + request = cloud_deploy.RetryJobRequest( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_automation(request=request) + client.retry_job(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateAutomationRequest( - parent="parent_value", - automation_id="automation_id_value", - request_id="request_id_value", + assert args[0] == cloud_deploy.RetryJobRequest( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) -def test_create_automation_use_cached_wrapped_rpc(): +def test_retry_job_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14781,28 +14912,21 @@ def test_create_automation_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_automation in client._transport._wrapped_methods + assert client._transport.retry_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_automation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.retry_job] = mock_rpc request = {} - client.create_automation(request) + client.retry_job(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_automation(request) + client.retry_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14810,7 +14934,7 @@ def test_create_automation_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_automation_empty_call_async(): +async def test_retry_job_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -14819,23 +14943,19 @@ async def test_create_automation_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.RetryJobResponse() ) - response = await client.create_automation() + response = await client.retry_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateAutomationRequest() + assert args[0] == cloud_deploy.RetryJobRequest() @pytest.mark.asyncio -async def test_create_automation_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_retry_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -14850,7 +14970,7 @@ async def test_create_automation_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_automation + 
client._client._transport.retry_job in client._client._transport._wrapped_methods ) @@ -14858,21 +14978,16 @@ async def test_create_automation_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_automation + client._client._transport.retry_job ] = mock_rpc request = {} - await client.create_automation(request) + await client.retry_job(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_automation(request) + await client.retry_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14880,8 +14995,8 @@ async def test_create_automation_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_automation_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateAutomationRequest +async def test_retry_job_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.RetryJobRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14893,47 +15008,43 @@ async def test_create_automation_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.RetryJobResponse() ) - response = await client.create_automation(request) + response = await client.retry_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.CreateAutomationRequest() + request = cloud_deploy.RetryJobRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.RetryJobResponse) @pytest.mark.asyncio -async def test_create_automation_async_from_dict(): - await test_create_automation_async(request_type=dict) +async def test_retry_job_async_from_dict(): + await test_retry_job_async(request_type=dict) -def test_create_automation_field_headers(): +def test_retry_job_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CreateAutomationRequest() + request = cloud_deploy.RetryJobRequest() - request.parent = "parent_value" + request.rollout = "rollout_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_automation(request) + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + call.return_value = cloud_deploy.RetryJobResponse() + client.retry_job(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -14944,30 +15055,28 @@ def test_create_automation_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "rollout=rollout_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_automation_field_headers_async(): +async def test_retry_job_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CreateAutomationRequest() + request = cloud_deploy.RetryJobRequest() - request.parent = "parent_value" + request.rollout = "rollout_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + cloud_deploy.RetryJobResponse() ) - await client.create_automation(request) + await client.retry_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14978,45 +15087,43 @@ async def test_create_automation_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "rollout=rollout_value", ) in kw["metadata"] -def test_create_automation_flattened(): +def test_retry_job_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.RetryJobResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_automation( - parent="parent_value", - automation=cloud_deploy.Automation(name="name_value"), - automation_id="automation_id_value", + client.retry_job( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].rollout + mock_val = "rollout_value" assert arg == mock_val - arg = args[0].automation - mock_val = cloud_deploy.Automation(name="name_value") + arg = args[0].phase_id + mock_val = "phase_id_value" assert arg == mock_val - arg = args[0].automation_id - mock_val = "automation_id_value" + arg = args[0].job_id + mock_val = "job_id_value" assert arg == mock_val -def test_create_automation_flattened_error(): +def test_retry_job_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15024,55 +15131,53 @@ def test_create_automation_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_automation( - cloud_deploy.CreateAutomationRequest(), - parent="parent_value", - automation=cloud_deploy.Automation(name="name_value"), - automation_id="automation_id_value", + client.retry_job( + cloud_deploy.RetryJobRequest(), + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) @pytest.mark.asyncio -async def test_create_automation_flattened_async(): +async def test_retry_job_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.RetryJobResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.RetryJobResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_automation( - parent="parent_value", - automation=cloud_deploy.Automation(name="name_value"), - automation_id="automation_id_value", + response = await client.retry_job( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].rollout + mock_val = "rollout_value" assert arg == mock_val - arg = args[0].automation - mock_val = cloud_deploy.Automation(name="name_value") + arg = args[0].phase_id + mock_val = "phase_id_value" assert arg == mock_val - arg = args[0].automation_id - mock_val = "automation_id_value" + arg = args[0].job_id + mock_val = "job_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_automation_flattened_error_async(): +async def test_retry_job_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15080,22 +15185,22 @@ async def test_create_automation_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_automation( - cloud_deploy.CreateAutomationRequest(), - parent="parent_value", - automation=cloud_deploy.Automation(name="name_value"), - automation_id="automation_id_value", + await client.retry_job( + cloud_deploy.RetryJobRequest(), + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.UpdateAutomationRequest, + cloud_deploy.ListJobRunsRequest, dict, ], ) -def test_update_automation(request_type, transport: str = "grpc"): +def test_list_job_runs(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15106,24 +15211,27 @@ def test_update_automation(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_automation(request) + call.return_value = cloud_deploy.ListJobRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.UpdateAutomationRequest() + request = cloud_deploy.ListJobRunsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListJobRunsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_update_automation_empty_call(): +def test_list_job_runs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -15132,19 +15240,17 @@ def test_update_automation_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_automation() + client.list_job_runs() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.UpdateAutomationRequest() + assert args[0] == cloud_deploy.ListJobRunsRequest() -def test_update_automation_non_empty_request_with_auto_populated_field(): +def test_list_job_runs_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -15155,26 +15261,30 @@ def test_update_automation_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.UpdateAutomationRequest( - request_id="request_id_value", + request = cloud_deploy.ListJobRunsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_automation(request=request) + client.list_job_runs(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.UpdateAutomationRequest( - request_id="request_id_value", + assert args[0] == cloud_deploy.ListJobRunsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) -def test_update_automation_use_cached_wrapped_rpc(): +def test_list_job_runs_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15188,28 +15298,21 @@ def test_update_automation_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_automation in client._transport._wrapped_methods + assert client._transport.list_job_runs in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_automation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_job_runs] = mock_rpc request = {} - client.update_automation(request) + client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_automation(request) + client.list_job_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15217,7 +15320,7 @@ def test_update_automation_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_automation_empty_call_async(): +async def test_list_job_runs_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -15226,21 +15329,22 @@ async def test_update_automation_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ListJobRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) ) - response = await client.update_automation() + response = await client.list_job_runs() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.UpdateAutomationRequest() + assert args[0] == cloud_deploy.ListJobRunsRequest() @pytest.mark.asyncio -async def test_update_automation_async_use_cached_wrapped_rpc( +async def test_list_job_runs_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -15257,7 +15361,7 @@ async def test_update_automation_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_automation + client._client._transport.list_job_runs in client._client._transport._wrapped_methods ) 
@@ -15265,21 +15369,16 @@ async def test_update_automation_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_automation + client._client._transport.list_job_runs ] = mock_rpc request = {} - await client.update_automation(request) + await client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_automation(request) + await client.list_job_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15287,8 +15386,8 @@ async def test_update_automation_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_automation_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.UpdateAutomationRequest +async def test_list_job_runs_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListJobRunsRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15300,47 +15399,48 @@ async def test_update_automation_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ListJobRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) ) - response = await client.update_automation(request) + response = await client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.UpdateAutomationRequest() + request = cloud_deploy.ListJobRunsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListJobRunsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_update_automation_async_from_dict(): - await test_update_automation_async(request_type=dict) +async def test_list_job_runs_async_from_dict(): + await test_list_job_runs_async(request_type=dict) -def test_update_automation_field_headers(): +def test_list_job_runs_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.UpdateAutomationRequest() + request = cloud_deploy.ListJobRunsRequest() - request.automation.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_automation(request) + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + call.return_value = cloud_deploy.ListJobRunsResponse() + client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -15351,30 +15451,28 @@ def test_update_automation_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "automation.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_automation_field_headers_async(): +async def test_list_job_runs_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.UpdateAutomationRequest() + request = cloud_deploy.ListJobRunsRequest() - request.automation.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + cloud_deploy.ListJobRunsResponse() ) - await client.update_automation(request) + await client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -15385,41 +15483,35 @@ async def test_update_automation_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "automation.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_automation_flattened(): +def test_list_job_runs_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.ListJobRunsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_automation( - automation=cloud_deploy.Automation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_job_runs( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].automation - mock_val = cloud_deploy.Automation(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_update_automation_flattened_error(): +def test_list_job_runs_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15427,50 +15519,43 @@ def test_update_automation_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_automation( - cloud_deploy.UpdateAutomationRequest(), - automation=cloud_deploy.Automation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_job_runs( + cloud_deploy.ListJobRunsRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_update_automation_flattened_async(): +async def test_list_job_runs_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.ListJobRunsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ListJobRunsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_automation( - automation=cloud_deploy.Automation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.list_job_runs( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].automation - mock_val = cloud_deploy.Automation(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_automation_flattened_error_async(): +async def test_list_job_runs_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15478,21 +15563,214 @@ async def test_update_automation_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_automation( - cloud_deploy.UpdateAutomationRequest(), - automation=cloud_deploy.Automation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.list_job_runs( + cloud_deploy.ListJobRunsRequest(), + parent="parent_value", + ) + + +def test_list_job_runs_pager(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[], + next_page_token="def", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_job_runs(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.JobRun) for i in results) + + +def test_list_job_runs_pages(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[], + next_page_token="def", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + ), + RuntimeError, + ) + pages = list(client.list_job_runs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_job_runs_async_pager(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_runs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[], + next_page_token="def", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_job_runs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloud_deploy.JobRun) for i in responses) + + +@pytest.mark.asyncio +async def test_list_job_runs_async_pages(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_runs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[], + next_page_token="def", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_job_runs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - cloud_deploy.DeleteAutomationRequest, + cloud_deploy.GetJobRunRequest, dict, ], ) -def test_delete_automation(request_type, transport: str = "grpc"): +def test_get_job_run(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15503,24 +15781,35 @@ def test_delete_automation(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_automation(request) + call.return_value = cloud_deploy.JobRun( + name="name_value", + uid="uid_value", + phase_id="phase_id_value", + job_id="job_id_value", + state=cloud_deploy.JobRun.State.IN_PROGRESS, + etag="etag_value", + ) + response = client.get_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.DeleteAutomationRequest() + request = cloud_deploy.GetJobRunRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.JobRun) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.phase_id == "phase_id_value" + assert response.job_id == "job_id_value" + assert response.state == cloud_deploy.JobRun.State.IN_PROGRESS + assert response.etag == "etag_value" -def test_delete_automation_empty_call(): +def test_get_job_run_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -15529,19 +15818,17 @@ def test_delete_automation_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_automation() + client.get_job_run() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.DeleteAutomationRequest() + assert args[0] == cloud_deploy.GetJobRunRequest() -def test_delete_automation_non_empty_request_with_auto_populated_field(): +def test_get_job_run_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -15552,30 +15839,24 @@ def test_delete_automation_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.DeleteAutomationRequest( + request = cloud_deploy.GetJobRunRequest( name="name_value", - request_id="request_id_value", - etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_automation(request=request) + client.get_job_run(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.DeleteAutomationRequest( + assert args[0] == cloud_deploy.GetJobRunRequest( name="name_value", - request_id="request_id_value", - etag="etag_value", ) -def test_delete_automation_use_cached_wrapped_rpc(): +def test_get_job_run_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15589,28 +15870,21 @@ def test_delete_automation_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_automation in client._transport._wrapped_methods + assert client._transport.get_job_run in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_automation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_job_run] = mock_rpc request = {} - client.delete_automation(request) + client.get_job_run(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_automation(request) + client.get_job_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15618,7 +15892,7 @@ def test_delete_automation_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_automation_empty_call_async(): +async def test_get_job_run_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -15627,21 +15901,26 @@ async def test_delete_automation_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.JobRun( + name="name_value", + uid="uid_value", + phase_id="phase_id_value", + job_id="job_id_value", + state=cloud_deploy.JobRun.State.IN_PROGRESS, + etag="etag_value", + ) ) - response = await client.delete_automation() + response = await client.get_job_run() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.DeleteAutomationRequest() + assert args[0] == cloud_deploy.GetJobRunRequest() @pytest.mark.asyncio -async def test_delete_automation_async_use_cached_wrapped_rpc( +async def test_get_job_run_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -15658,7 +15937,7 @@ async def test_delete_automation_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_automation + 
client._client._transport.get_job_run in client._client._transport._wrapped_methods ) @@ -15666,21 +15945,16 @@ async def test_delete_automation_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_automation + client._client._transport.get_job_run ] = mock_rpc request = {} - await client.delete_automation(request) + await client.get_job_run(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_automation(request) + await client.get_job_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15688,8 +15962,8 @@ async def test_delete_automation_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_automation_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.DeleteAutomationRequest +async def test_get_job_run_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetJobRunRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15701,47 +15975,56 @@ async def test_delete_automation_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.JobRun( + name="name_value", + uid="uid_value", + phase_id="phase_id_value", + job_id="job_id_value", + state=cloud_deploy.JobRun.State.IN_PROGRESS, + etag="etag_value", + ) ) - response = await client.delete_automation(request) + response = await client.get_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.DeleteAutomationRequest() + request = cloud_deploy.GetJobRunRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.JobRun) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.phase_id == "phase_id_value" + assert response.job_id == "job_id_value" + assert response.state == cloud_deploy.JobRun.State.IN_PROGRESS + assert response.etag == "etag_value" @pytest.mark.asyncio -async def test_delete_automation_async_from_dict(): - await test_delete_automation_async(request_type=dict) +async def test_get_job_run_async_from_dict(): + await test_get_job_run_async(request_type=dict) -def test_delete_automation_field_headers(): +def test_get_job_run_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.DeleteAutomationRequest() + request = cloud_deploy.GetJobRunRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_automation(request) + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + call.return_value = cloud_deploy.JobRun() + client.get_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -15757,25 +16040,21 @@ def test_delete_automation_field_headers(): @pytest.mark.asyncio -async def test_delete_automation_field_headers_async(): +async def test_get_job_run_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.DeleteAutomationRequest() + request = cloud_deploy.GetJobRunRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.delete_automation(request) + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.JobRun()) + await client.get_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -15790,20 +16069,18 @@ async def test_delete_automation_field_headers_async(): ) in kw["metadata"] -def test_delete_automation_flattened(): +def test_get_job_run_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.JobRun() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_automation( + client.get_job_run( name="name_value", ) @@ -15816,7 +16093,7 @@ def test_delete_automation_flattened(): assert arg == mock_val -def test_delete_automation_flattened_error(): +def test_get_job_run_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15824,31 +16101,27 @@ def test_delete_automation_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_automation( - cloud_deploy.DeleteAutomationRequest(), + client.get_job_run( + cloud_deploy.GetJobRunRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_automation_flattened_async(): +async def test_get_job_run_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.JobRun() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.JobRun()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_automation( + response = await client.get_job_run( name="name_value", ) @@ -15862,7 +16135,7 @@ async def test_delete_automation_flattened_async(): @pytest.mark.asyncio -async def test_delete_automation_flattened_error_async(): +async def test_get_job_run_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15870,8 +16143,8 @@ async def test_delete_automation_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_automation( - cloud_deploy.DeleteAutomationRequest(), + await client.get_job_run( + cloud_deploy.GetJobRunRequest(), name="name_value", ) @@ -15879,11 +16152,11 @@ async def test_delete_automation_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetAutomationRequest, + cloud_deploy.TerminateJobRunRequest, dict, ], ) -def test_get_automation(request_type, transport: str = "grpc"): +def test_terminate_job_run(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15894,35 +16167,24 @@ def test_get_automation(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Automation( - name="name_value", - uid="uid_value", - description="description_value", - etag="etag_value", - suspended=True, - service_account="service_account_value", - ) - response = client.get_automation(request) + call.return_value = cloud_deploy.TerminateJobRunResponse() + response = client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetAutomationRequest() + request = cloud_deploy.TerminateJobRunRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.Automation) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.etag == "etag_value" - assert response.suspended is True - assert response.service_account == "service_account_value" + assert isinstance(response, cloud_deploy.TerminateJobRunResponse) -def test_get_automation_empty_call(): +def test_terminate_job_run_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -15931,17 +16193,19 @@ def test_get_automation_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_automation() + client.terminate_job_run() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRequest() + assert args[0] == cloud_deploy.TerminateJobRunRequest() -def test_get_automation_non_empty_request_with_auto_populated_field(): +def test_terminate_job_run_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -15952,24 +16216,26 @@ def test_get_automation_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.GetAutomationRequest( + request = cloud_deploy.TerminateJobRunRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_automation(request=request) + client.terminate_job_run(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRequest( + assert args[0] == cloud_deploy.TerminateJobRunRequest( name="name_value", ) -def test_get_automation_use_cached_wrapped_rpc(): +def test_terminate_job_run_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15983,21 +16249,23 @@ def test_get_automation_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_automation in client._transport._wrapped_methods + assert client._transport.terminate_job_run in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_automation] = mock_rpc + client._transport._wrapped_methods[ + client._transport.terminate_job_run + ] = mock_rpc request = {} - client.get_automation(request) + client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_automation(request) + client.terminate_job_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16005,7 +16273,7 @@ def test_get_automation_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_automation_empty_call_async(): +async def test_terminate_job_run_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudDeployAsyncClient( @@ -16014,26 +16282,21 @@ async def test_get_automation_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Automation( - name="name_value", - uid="uid_value", - description="description_value", - etag="etag_value", - suspended=True, - service_account="service_account_value", - ) + cloud_deploy.TerminateJobRunResponse() ) - response = await client.get_automation() + response = await client.terminate_job_run() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRequest() + assert args[0] == cloud_deploy.TerminateJobRunRequest() @pytest.mark.asyncio -async def test_get_automation_async_use_cached_wrapped_rpc( +async def test_terminate_job_run_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -16050,7 +16313,7 @@ async def test_get_automation_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_automation + client._client._transport.terminate_job_run in client._client._transport._wrapped_methods ) @@ -16058,16 +16321,16 @@ async def test_get_automation_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_automation + client._client._transport.terminate_job_run ] = mock_rpc request = {} - await client.get_automation(request) + await client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_automation(request) + await client.terminate_job_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16075,8 +16338,8 @@ async def test_get_automation_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_automation_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetAutomationRequest +async def test_terminate_job_run_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.TerminateJobRunRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16088,56 +16351,47 @@ async def test_get_automation_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Automation( - name="name_value", - uid="uid_value", - description="description_value", - etag="etag_value", - suspended=True, - service_account="service_account_value", - ) + cloud_deploy.TerminateJobRunResponse() ) - response = await client.get_automation(request) + response = await client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetAutomationRequest() + request = cloud_deploy.TerminateJobRunRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.Automation) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.etag == "etag_value" - assert response.suspended is True - assert response.service_account == "service_account_value" + assert isinstance(response, cloud_deploy.TerminateJobRunResponse) @pytest.mark.asyncio -async def test_get_automation_async_from_dict(): - await test_get_automation_async(request_type=dict) +async def test_terminate_job_run_async_from_dict(): + await test_terminate_job_run_async(request_type=dict) -def test_get_automation_field_headers(): +def test_terminate_job_run_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetAutomationRequest() + request = cloud_deploy.TerminateJobRunRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: - call.return_value = cloud_deploy.Automation() - client.get_automation(request) + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: + call.return_value = cloud_deploy.TerminateJobRunResponse() + client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -16153,23 +16407,25 @@ def test_get_automation_field_headers(): @pytest.mark.asyncio -async def test_get_automation_field_headers_async(): +async def test_terminate_job_run_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.GetAutomationRequest() + request = cloud_deploy.TerminateJobRunRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Automation() + cloud_deploy.TerminateJobRunResponse() ) - await client.get_automation(request) + await client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -16184,18 +16440,20 @@ async def test_get_automation_field_headers_async(): ) in kw["metadata"] -def test_get_automation_flattened(): +def test_terminate_job_run_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Automation() + call.return_value = cloud_deploy.TerminateJobRunResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_automation( + client.terminate_job_run( name="name_value", ) @@ -16208,7 +16466,7 @@ def test_get_automation_flattened(): assert arg == mock_val -def test_get_automation_flattened_error(): +def test_terminate_job_run_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16216,29 +16474,31 @@ def test_get_automation_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_automation( - cloud_deploy.GetAutomationRequest(), + client.terminate_job_run( + cloud_deploy.TerminateJobRunRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_automation_flattened_async(): +async def test_terminate_job_run_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Automation() + call.return_value = cloud_deploy.TerminateJobRunResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Automation() + cloud_deploy.TerminateJobRunResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_automation( + response = await client.terminate_job_run( name="name_value", ) @@ -16252,7 +16512,7 @@ async def test_get_automation_flattened_async(): @pytest.mark.asyncio -async def test_get_automation_flattened_error_async(): +async def test_terminate_job_run_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16260,8 +16520,8 @@ async def test_get_automation_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_automation( - cloud_deploy.GetAutomationRequest(), + await client.terminate_job_run( + cloud_deploy.TerminateJobRunRequest(), name="name_value", ) @@ -16269,11 +16529,11 @@ async def test_get_automation_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListAutomationsRequest, + cloud_deploy.GetConfigRequest, dict, ], ) -def test_list_automations(request_type, transport: str = "grpc"): +def test_get_config(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16284,27 +16544,27 @@ def test_list_automations(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListAutomationsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + call.return_value = cloud_deploy.Config( + name="name_value", + default_skaffold_version="default_skaffold_version_value", ) - response = client.list_automations(request) + response = client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListAutomationsRequest() + request = cloud_deploy.GetConfigRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAutomationsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, cloud_deploy.Config) + assert response.name == "name_value" + assert response.default_skaffold_version == "default_skaffold_version_value" -def test_list_automations_empty_call(): +def test_get_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -16313,17 +16573,17 @@ def test_list_automations_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_automations() + client.get_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationsRequest() + assert args[0] == cloud_deploy.GetConfigRequest() -def test_list_automations_non_empty_request_with_auto_populated_field(): +def test_get_config_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -16334,30 +16594,24 @@ def test_list_automations_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = cloud_deploy.ListAutomationsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + request = cloud_deploy.GetConfigRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_automations(request=request) + client.get_config(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + assert args[0] == cloud_deploy.GetConfigRequest( + name="name_value", ) -def test_list_automations_use_cached_wrapped_rpc(): +def test_get_config_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16371,23 +16625,21 @@ def test_list_automations_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_automations in client._transport._wrapped_methods + assert client._transport.get_config in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_automations - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_config] = mock_rpc request = {} - client.list_automations(request) + client.get_config(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_automations(request) + client.get_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16395,7 +16647,7 @@ def test_list_automations_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_automations_empty_call_async(): +async def test_get_config_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -16404,24 +16656,22 @@ async def test_list_automations_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + cloud_deploy.Config( + name="name_value", + default_skaffold_version="default_skaffold_version_value", ) ) - response = await client.list_automations() + response = await client.get_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationsRequest() + assert args[0] == cloud_deploy.GetConfigRequest() @pytest.mark.asyncio -async def test_list_automations_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_get_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -16436,7 +16686,7 @@ async def test_list_automations_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_automations + client._client._transport.get_config in client._client._transport._wrapped_methods ) @@ -16444,16 +16694,16 @@ async def test_list_automations_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_automations + client._client._transport.get_config ] = mock_rpc request = {} - await client.list_automations(request) + await client.get_config(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.list_automations(request) + await client.get_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16461,8 +16711,8 @@ async def test_list_automations_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_automations_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ListAutomationsRequest +async def test_get_config_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetConfigRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16474,48 +16724,48 @@ async def test_list_automations_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + cloud_deploy.Config( + name="name_value", + default_skaffold_version="default_skaffold_version_value", ) ) - response = await client.list_automations(request) + response = await client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListAutomationsRequest() + request = cloud_deploy.GetConfigRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAutomationsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, cloud_deploy.Config) + assert response.name == "name_value" + assert response.default_skaffold_version == "default_skaffold_version_value" @pytest.mark.asyncio -async def test_list_automations_async_from_dict(): - await test_list_automations_async(request_type=dict) +async def test_get_config_async_from_dict(): + await test_get_config_async(request_type=dict) -def test_list_automations_field_headers(): +def test_get_config_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListAutomationsRequest() + request = cloud_deploy.GetConfigRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: - call.return_value = cloud_deploy.ListAutomationsResponse() - client.list_automations(request) + with mock.patch.object(type(client.transport.get_config), "__call__") as call: + call.return_value = cloud_deploy.Config() + client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -16526,28 +16776,26 @@ def test_list_automations_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_automations_field_headers_async(): +async def test_get_config_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = cloud_deploy.ListAutomationsRequest() + request = cloud_deploy.GetConfigRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationsResponse() - ) - await client.list_automations(request) + with mock.patch.object(type(client.transport.get_config), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) + await client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -16558,35 +16806,35 @@ async def test_list_automations_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_automations_flattened(): +def test_get_config_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListAutomationsResponse() + call.return_value = cloud_deploy.Config() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_automations( - parent="parent_value", + client.get_config( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_automations_flattened_error(): +def test_get_config_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16594,43 +16842,41 @@ def test_list_automations_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_automations( - cloud_deploy.ListAutomationsRequest(), - parent="parent_value", + client.get_config( + cloud_deploy.GetConfigRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_automations_flattened_async(): +async def test_get_config_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListAutomationsResponse() + call.return_value = cloud_deploy.Config() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationsResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_automations( - parent="parent_value", + response = await client.get_config( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_automations_flattened_error_async(): +async def test_get_config_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16638,214 +16884,20 @@ async def test_list_automations_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_automations( - cloud_deploy.ListAutomationsRequest(), - parent="parent_value", - ) - - -def test_list_automations_pager(transport_name: str = "grpc"): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationsResponse( - automations=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_automations(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.Automation) for i in results) - - -def test_list_automations_pages(transport_name: str = "grpc"): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationsResponse( - automations=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_automations(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_automations_async_pager(): - client = CloudDeployAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_automations), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationsResponse( - automations=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_automations( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, cloud_deploy.Automation) for i in responses) - - -@pytest.mark.asyncio -async def test_list_automations_async_pages(): - client = CloudDeployAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_automations), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationsResponse( - automations=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - ), - RuntimeError, + await client.get_config( + cloud_deploy.GetConfigRequest(), + name="name_value", ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_automations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetAutomationRunRequest, + cloud_deploy.CreateAutomationRequest, dict, ], ) -def test_get_automation_run(request_type, transport: str = "grpc"): +def test_create_automation(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16857,40 +16909,23 @@ def test_get_automation_run(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.AutomationRun( - name="name_value", - etag="etag_value", - service_account="service_account_value", - target_id="target_id_value", - state=cloud_deploy.AutomationRun.State.SUCCEEDED, - state_description="state_description_value", - rule_id="rule_id_value", - automation_id="automation_id_value", - ) - response = client.get_automation_run(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetAutomationRunRequest() + request = cloud_deploy.CreateAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.AutomationRun) - assert response.name == "name_value" - assert response.etag == "etag_value" - assert response.service_account == "service_account_value" - assert response.target_id == "target_id_value" - assert response.state == cloud_deploy.AutomationRun.State.SUCCEEDED - assert response.state_description == "state_description_value" - assert response.rule_id == "rule_id_value" - assert response.automation_id == "automation_id_value" + assert isinstance(response, future.Future) -def test_get_automation_run_empty_call(): +def test_create_automation_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -16900,18 +16935,18 @@ def test_get_automation_run_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_automation_run() + client.create_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRunRequest() + assert args[0] == cloud_deploy.CreateAutomationRequest() -def test_get_automation_run_non_empty_request_with_auto_populated_field(): +def test_create_automation_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -16922,26 +16957,30 @@ def test_get_automation_run_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.GetAutomationRunRequest( - name="name_value", + request = cloud_deploy.CreateAutomationRequest( + parent="parent_value", + automation_id="automation_id_value", + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_automation_run(request=request) + client.create_automation(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRunRequest( - name="name_value", + assert args[0] == cloud_deploy.CreateAutomationRequest( + parent="parent_value", + automation_id="automation_id_value", + request_id="request_id_value", ) -def test_get_automation_run_use_cached_wrapped_rpc(): +def test_create_automation_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16955,9 +16994,7 @@ def test_get_automation_run_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_automation_run in client._transport._wrapped_methods - ) + assert client._transport.create_automation in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -16965,15 +17002,20 @@ def test_get_automation_run_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_automation_run + client._transport.create_automation ] = mock_rpc request = {} - client.get_automation_run(request) + client.create_automation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_automation_run(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16981,7 +17023,7 @@ def test_get_automation_run_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_automation_run_empty_call_async(): +async def test_create_automation_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -16991,29 +17033,20 @@ async def test_get_automation_run_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AutomationRun( - name="name_value", - etag="etag_value", - service_account="service_account_value", - target_id="target_id_value", - state=cloud_deploy.AutomationRun.State.SUCCEEDED, - state_description="state_description_value", - rule_id="rule_id_value", - automation_id="automation_id_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_automation_run() + response = await client.create_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRunRequest() + assert args[0] == cloud_deploy.CreateAutomationRequest() @pytest.mark.asyncio -async def test_get_automation_run_async_use_cached_wrapped_rpc( +async def test_create_automation_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -17030,7 +17063,7 @@ async def test_get_automation_run_async_use_cached_wrapped_rpc( # Ensure method has been 
cached assert ( - client._client._transport.get_automation_run + client._client._transport.create_automation in client._client._transport._wrapped_methods ) @@ -17038,16 +17071,21 @@ async def test_get_automation_run_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_automation_run + client._client._transport.create_automation ] = mock_rpc request = {} - await client.get_automation_run(request) + await client.create_automation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_automation_run(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -17055,8 +17093,8 @@ async def test_get_automation_run_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_automation_run_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetAutomationRunRequest +async def test_create_automation_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateAutomationRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17069,63 +17107,46 @@ async def test_get_automation_run_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AutomationRun( - name="name_value", - etag="etag_value", - service_account="service_account_value", - target_id="target_id_value", - state=cloud_deploy.AutomationRun.State.SUCCEEDED, - state_description="state_description_value", - rule_id="rule_id_value", - automation_id="automation_id_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_automation_run(request) + response = await client.create_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetAutomationRunRequest() + request = cloud_deploy.CreateAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.AutomationRun) - assert response.name == "name_value" - assert response.etag == "etag_value" - assert response.service_account == "service_account_value" - assert response.target_id == "target_id_value" - assert response.state == cloud_deploy.AutomationRun.State.SUCCEEDED - assert response.state_description == "state_description_value" - assert response.rule_id == "rule_id_value" - assert response.automation_id == "automation_id_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_automation_run_async_from_dict(): - await test_get_automation_run_async(request_type=dict) +async def test_create_automation_async_from_dict(): + await test_create_automation_async(request_type=dict) -def test_get_automation_run_field_headers(): +def test_create_automation_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.GetAutomationRunRequest() + request = cloud_deploy.CreateAutomationRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: - call.return_value = cloud_deploy.AutomationRun() - client.get_automation_run(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -17136,30 +17157,30 @@ def test_get_automation_run_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_automation_run_field_headers_async(): +async def test_create_automation_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetAutomationRunRequest() + request = cloud_deploy.CreateAutomationRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AutomationRun() + operations_pb2.Operation(name="operations/op") ) - await client.get_automation_run(request) + await client.create_automation(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -17170,37 +17191,45 @@ async def test_get_automation_run_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_automation_run_flattened(): +def test_create_automation_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.AutomationRun() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_automation_run( - name="name_value", + client.create_automation( + parent="parent_value", + automation=cloud_deploy.Automation(name="name_value"), + automation_id="automation_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].automation + mock_val = cloud_deploy.Automation(name="name_value") + assert arg == mock_val + arg = args[0].automation_id + mock_val = "automation_id_value" assert arg == mock_val -def test_get_automation_run_flattened_error(): +def test_create_automation_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -17208,45 +17237,55 @@ def test_get_automation_run_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_automation_run( - cloud_deploy.GetAutomationRunRequest(), - name="name_value", + client.create_automation( + cloud_deploy.CreateAutomationRequest(), + parent="parent_value", + automation=cloud_deploy.Automation(name="name_value"), + automation_id="automation_id_value", ) @pytest.mark.asyncio -async def test_get_automation_run_flattened_async(): +async def test_create_automation_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.AutomationRun() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AutomationRun() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_automation_run( - name="name_value", + response = await client.create_automation( + parent="parent_value", + automation=cloud_deploy.Automation(name="name_value"), + automation_id="automation_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].automation + mock_val = cloud_deploy.Automation(name="name_value") + assert arg == mock_val + arg = args[0].automation_id + mock_val = "automation_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_automation_run_flattened_error_async(): +async def test_create_automation_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -17254,20 +17293,22 @@ async def test_get_automation_run_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_automation_run( - cloud_deploy.GetAutomationRunRequest(), - name="name_value", + await client.create_automation( + cloud_deploy.CreateAutomationRequest(), + parent="parent_value", + automation=cloud_deploy.Automation(name="name_value"), + automation_id="automation_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListAutomationRunsRequest, + cloud_deploy.UpdateAutomationRequest, dict, ], ) -def test_list_automation_runs(request_type, transport: str = "grpc"): +def test_update_automation(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17279,28 +17320,23 @@ def test_list_automation_runs(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.ListAutomationRunsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - response = client.list_automation_runs(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListAutomationRunsRequest() + request = cloud_deploy.UpdateAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAutomationRunsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) -def test_list_automation_runs_empty_call(): +def test_update_automation_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -17310,18 +17346,18 @@ def test_list_automation_runs_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_automation_runs() + client.update_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationRunsRequest() + assert args[0] == cloud_deploy.UpdateAutomationRequest() -def test_list_automation_runs_non_empty_request_with_auto_populated_field(): +def test_update_automation_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -17332,32 +17368,26 @@ def test_list_automation_runs_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.ListAutomationRunsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + request = cloud_deploy.UpdateAutomationRequest( + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_automation_runs(request=request) + client.update_automation(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationRunsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + assert args[0] == cloud_deploy.UpdateAutomationRequest( + request_id="request_id_value", ) -def test_list_automation_runs_use_cached_wrapped_rpc(): +def test_update_automation_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17371,9 +17401,7 @@ def test_list_automation_runs_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_automation_runs in client._transport._wrapped_methods - ) + assert client._transport.update_automation in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -17381,15 +17409,20 @@ def test_list_automation_runs_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_automation_runs + client._transport.update_automation ] = mock_rpc request = {} - client.list_automation_runs(request) + client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_automation_runs(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -17397,7 +17430,7 @@ def test_list_automation_runs_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_automation_runs_empty_call_async(): +async def test_update_automation_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -17407,23 +17440,20 @@ async def test_list_automation_runs_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationRunsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_automation_runs() + response = await client.update_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationRunsRequest() + assert args[0] == cloud_deploy.UpdateAutomationRequest() @pytest.mark.asyncio -async def test_list_automation_runs_async_use_cached_wrapped_rpc( +async def test_update_automation_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -17440,7 +17470,7 @@ async def test_list_automation_runs_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_automation_runs + client._client._transport.update_automation in client._client._transport._wrapped_methods ) @@ -17448,16 
+17478,21 @@ async def test_list_automation_runs_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_automation_runs + client._client._transport.update_automation ] = mock_rpc request = {} - await client.list_automation_runs(request) + await client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_automation_runs(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -17465,8 +17500,8 @@ async def test_list_automation_runs_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_automation_runs_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ListAutomationRunsRequest +async def test_update_automation_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.UpdateAutomationRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17479,51 +17514,46 @@ async def test_list_automation_runs_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationRunsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_automation_runs(request) + response = await client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListAutomationRunsRequest() + request = cloud_deploy.UpdateAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAutomationRunsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_automation_runs_async_from_dict(): - await test_list_automation_runs_async(request_type=dict) +async def test_update_automation_async_from_dict(): + await test_update_automation_async(request_type=dict) -def test_list_automation_runs_field_headers(): +def test_update_automation_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListAutomationRunsRequest() + request = cloud_deploy.UpdateAutomationRequest() - request.parent = "parent_value" + request.automation.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: - call.return_value = cloud_deploy.ListAutomationRunsResponse() - client.list_automation_runs(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -17534,30 +17564,30 @@ def test_list_automation_runs_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "automation.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_automation_runs_field_headers_async(): +async def test_update_automation_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListAutomationRunsRequest() + request = cloud_deploy.UpdateAutomationRequest() - request.parent = "parent_value" + request.automation.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationRunsResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_automation_runs(request) + await client.update_automation(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -17568,37 +17598,41 @@ async def test_list_automation_runs_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "automation.name=name_value", ) in kw["metadata"] -def test_list_automation_runs_flattened(): +def test_update_automation_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListAutomationRunsResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_automation_runs( - parent="parent_value", + client.update_automation( + automation=cloud_deploy.Automation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].automation + mock_val = cloud_deploy.Automation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_list_automation_runs_flattened_error(): +def test_update_automation_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -17606,45 +17640,50 @@ def test_list_automation_runs_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_automation_runs( - cloud_deploy.ListAutomationRunsRequest(), - parent="parent_value", + client.update_automation( + cloud_deploy.UpdateAutomationRequest(), + automation=cloud_deploy.Automation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_list_automation_runs_flattened_async(): +async def test_update_automation_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListAutomationRunsResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationRunsResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_automation_runs( - parent="parent_value", + response = await client.update_automation( + automation=cloud_deploy.Automation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].automation + mock_val = cloud_deploy.Automation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_list_automation_runs_flattened_error_async(): +async def test_update_automation_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -17652,271 +17691,70 @@ async def test_list_automation_runs_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_automation_runs( - cloud_deploy.ListAutomationRunsRequest(), - parent="parent_value", + await client.update_automation( + cloud_deploy.UpdateAutomationRequest(), + automation=cloud_deploy.Automation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_automation_runs_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.DeleteAutomationRequest, + dict, + ], +) +def test_delete_automation(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_automation_runs(request={}, retry=retry, timeout=timeout) + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_automation(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.DeleteAutomationRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.AutomationRun) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_list_automation_runs_pages(transport_name: str = "grpc"): +def test_delete_automation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - pages = list(client.list_automation_runs(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_automation_runs_async_pager(): - client = CloudDeployAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_automation_runs), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_automation_runs( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, cloud_deploy.AutomationRun) for i in responses) - - -@pytest.mark.asyncio -async def test_list_automation_runs_async_pages(): - client = CloudDeployAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_automation_runs), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_automation_runs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_deploy.CancelAutomationRunRequest, - dict, - ], -) -def test_cancel_automation_run(request_type, transport: str = "grpc"): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelAutomationRunResponse() - response = client.cancel_automation_run(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = cloud_deploy.CancelAutomationRunRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.CancelAutomationRunResponse) - - -def test_cancel_automation_run_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.cancel_automation_run() + client.delete_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelAutomationRunRequest() + assert args[0] == cloud_deploy.DeleteAutomationRequest() -def test_cancel_automation_run_non_empty_request_with_auto_populated_field(): +def test_delete_automation_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -17927,26 +17765,30 @@ def test_cancel_automation_run_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.CancelAutomationRunRequest( + request = cloud_deploy.DeleteAutomationRequest( name="name_value", + request_id="request_id_value", + etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.cancel_automation_run(request=request) + client.delete_automation(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelAutomationRunRequest( + assert args[0] == cloud_deploy.DeleteAutomationRequest( name="name_value", + request_id="request_id_value", + etag="etag_value", ) -def test_cancel_automation_run_use_cached_wrapped_rpc(): +def test_delete_automation_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17960,10 +17802,7 @@ def test_cancel_automation_run_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.cancel_automation_run - in client._transport._wrapped_methods - ) + assert client._transport.delete_automation in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -17971,15 +17810,20 @@ def test_cancel_automation_run_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.cancel_automation_run + client._transport.delete_automation ] = mock_rpc request = {} - client.cancel_automation_run(request) + client.delete_automation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.cancel_automation_run(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -17987,7 +17831,7 @@ def test_cancel_automation_run_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_cancel_automation_run_empty_call_async(): +async def test_delete_automation_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -17997,20 +17841,20 @@ async def test_cancel_automation_run_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelAutomationRunResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.cancel_automation_run() + response = await client.delete_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelAutomationRunRequest() + assert args[0] == cloud_deploy.DeleteAutomationRequest() @pytest.mark.asyncio -async def test_cancel_automation_run_async_use_cached_wrapped_rpc( +async def test_delete_automation_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -18027,7 +17871,7 @@ async def test_cancel_automation_run_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.cancel_automation_run + client._client._transport.delete_automation in client._client._transport._wrapped_methods ) @@ -18035,16 +17879,21 @@ async def 
test_cancel_automation_run_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.cancel_automation_run + client._client._transport.delete_automation ] = mock_rpc request = {} - await client.cancel_automation_run(request) + await client.delete_automation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.cancel_automation_run(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -18052,9 +17901,8 @@ async def test_cancel_automation_run_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_cancel_automation_run_async( - transport: str = "grpc_asyncio", - request_type=cloud_deploy.CancelAutomationRunRequest, +async def test_delete_automation_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.DeleteAutomationRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18067,46 +17915,46 @@ async def test_cancel_automation_run_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelAutomationRunResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.cancel_automation_run(request) + response = await client.delete_automation(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.CancelAutomationRunRequest() + request = cloud_deploy.DeleteAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.CancelAutomationRunResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_cancel_automation_run_async_from_dict(): - await test_cancel_automation_run_async(request_type=dict) +async def test_delete_automation_async_from_dict(): + await test_delete_automation_async(request_type=dict) -def test_cancel_automation_run_field_headers(): +def test_delete_automation_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CancelAutomationRunRequest() + request = cloud_deploy.DeleteAutomationRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: - call.return_value = cloud_deploy.CancelAutomationRunResponse() - client.cancel_automation_run(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -18122,25 +17970,25 @@ def test_cancel_automation_run_field_headers(): @pytest.mark.asyncio -async def test_cancel_automation_run_field_headers_async(): +async def test_delete_automation_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.CancelAutomationRunRequest() + request = cloud_deploy.DeleteAutomationRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelAutomationRunResponse() + operations_pb2.Operation(name="operations/op") ) - await client.cancel_automation_run(request) + await client.delete_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -18155,20 +18003,20 @@ async def test_cancel_automation_run_field_headers_async(): ) in kw["metadata"] -def test_cancel_automation_run_flattened(): +def test_delete_automation_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelAutomationRunResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_automation_run( + client.delete_automation( name="name_value", ) @@ -18181,7 +18029,7 @@ def test_cancel_automation_run_flattened(): assert arg == mock_val -def test_cancel_automation_run_flattened_error(): +def test_delete_automation_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -18189,31 +18037,31 @@ def test_cancel_automation_run_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.cancel_automation_run( - cloud_deploy.CancelAutomationRunRequest(), + client.delete_automation( + cloud_deploy.DeleteAutomationRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_cancel_automation_run_flattened_async(): +async def test_delete_automation_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelAutomationRunResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelAutomationRunResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_automation_run( + response = await client.delete_automation( name="name_value", ) @@ -18227,7 +18075,7 @@ async def test_cancel_automation_run_flattened_async(): @pytest.mark.asyncio -async def test_cancel_automation_run_flattened_error_async(): +async def test_delete_automation_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -18235,8 +18083,8 @@ async def test_cancel_automation_run_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.cancel_automation_run( - cloud_deploy.CancelAutomationRunRequest(), + await client.delete_automation( + cloud_deploy.DeleteAutomationRequest(), name="name_value", ) @@ -18244,52 +18092,103 @@ async def test_cancel_automation_run_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListDeliveryPipelinesRequest, + cloud_deploy.GetAutomationRequest, dict, ], ) -def test_list_delivery_pipelines_rest(request_type): +def test_get_automation(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListDeliveryPipelinesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloud_deploy.Automation( + name="name_value", + uid="uid_value", + description="description_value", + etag="etag_value", + suspended=True, + service_account="service_account_value", ) + response = client.get_automation(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_delivery_pipelines(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.GetAutomationRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDeliveryPipelinesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, cloud_deploy.Automation) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.suspended is True + assert response.service_account == "service_account_value" -def test_list_delivery_pipelines_rest_use_cached_wrapped_rpc(): +def test_get_automation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_automation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRequest() + + +def test_get_automation_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_deploy.GetAutomationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_automation(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRequest( + name="name_value", + ) + + +def test_get_automation_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -18297,137 +18196,4184 @@ def test_list_delivery_pipelines_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_delivery_pipelines - in client._transport._wrapped_methods - ) + assert client._transport.get_automation in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_delivery_pipelines - ] = mock_rpc - + client._transport._wrapped_methods[client._transport.get_automation] = mock_rpc request = {} - client.list_delivery_pipelines(request) + client.get_automation(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_delivery_pipelines(request) + client.get_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_delivery_pipelines_rest_required_fields( - request_type=cloud_deploy.ListDeliveryPipelinesRequest, -): - transport_class = transports.CloudDeployRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_get_automation_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Automation( + name="name_value", + uid="uid_value", + description="description_value", + etag="etag_value", + suspended=True, + service_account="service_account_value", + ) + ) + response = await client.get_automation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRequest() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_delivery_pipelines._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with default values are now present +@pytest.mark.asyncio +async def test_get_automation_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - jsonified_request["parent"] = "parent_value" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_delivery_pipelines._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", + # Ensure method has been cached + assert ( + client._client._transport.get_automation + in client._client._transport._wrapped_methods ) - ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_automation + ] = mock_rpc - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + request = {} + await client.get_automation(request) - # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListDeliveryPipelinesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 - response_value = Response() - response_value.status_code = 200 + await client.get_automation(request) - # Convert return value to protobuf type + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_automation_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetAutomationRequest +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Automation( + name="name_value", + uid="uid_value", + description="description_value", + etag="etag_value", + suspended=True, + service_account="service_account_value", + ) + ) + response = await client.get_automation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.GetAutomationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_deploy.Automation) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.suspended is True + assert response.service_account == "service_account_value" + + +@pytest.mark.asyncio +async def test_get_automation_async_from_dict(): + await test_get_automation_async(request_type=dict) + + +def test_get_automation_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.GetAutomationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + call.return_value = cloud_deploy.Automation() + client.get_automation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_automation_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.GetAutomationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Automation() + ) + await client.get_automation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_automation_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.Automation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_automation( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_automation_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_automation( + cloud_deploy.GetAutomationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_automation_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.Automation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Automation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_automation( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_automation_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_automation( + cloud_deploy.GetAutomationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListAutomationsRequest, + dict, + ], +) +def test_list_automations(request_type, transport: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloud_deploy.ListAutomationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.ListAutomationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAutomationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_automations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_automations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationsRequest() + + +def test_list_automations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = cloud_deploy.ListAutomationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_automations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_automations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_automations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_automations + ] = mock_rpc + request = {} + client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_automations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_automations_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_automations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationsRequest() + + +@pytest.mark.asyncio +async def test_list_automations_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_automations + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + 
client._client._transport.list_automations + ] = mock_rpc + + request = {} + await client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_automations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_automations_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListAutomationsRequest +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.ListAutomationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAutomationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_automations_async_from_dict(): + await test_list_automations_async(request_type=dict) + + +def test_list_automations_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.ListAutomationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + call.return_value = cloud_deploy.ListAutomationsResponse() + client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_automations_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.ListAutomationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationsResponse() + ) + await client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_automations_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.ListAutomationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_automations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_automations_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_automations( + cloud_deploy.ListAutomationsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_automations_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloud_deploy.ListAutomationsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_automations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_automations_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_automations( + cloud_deploy.ListAutomationsRequest(), + parent="parent_value", + ) + + +def test_list_automations_pager(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationsResponse( + automations=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_automations(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.Automation) for i in results) + + +def test_list_automations_pages(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationsResponse( + automations=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_automations(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_automations_async_pager(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automations), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationsResponse( + automations=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_automations( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloud_deploy.Automation) for i in responses) + + +@pytest.mark.asyncio +async def test_list_automations_async_pages(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automations), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationsResponse( + automations=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_automations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetAutomationRunRequest, + dict, + ], +) +def test_get_automation_run(request_type, transport: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloud_deploy.AutomationRun( + name="name_value", + etag="etag_value", + service_account="service_account_value", + target_id="target_id_value", + state=cloud_deploy.AutomationRun.State.SUCCEEDED, + state_description="state_description_value", + rule_id="rule_id_value", + automation_id="automation_id_value", + ) + response = client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.GetAutomationRunRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.AutomationRun) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.service_account == "service_account_value" + assert response.target_id == "target_id_value" + assert response.state == cloud_deploy.AutomationRun.State.SUCCEEDED + assert response.state_description == "state_description_value" + assert response.rule_id == "rule_id_value" + assert response.automation_id == "automation_id_value" + + +def test_get_automation_run_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_automation_run() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRunRequest() + + +def test_get_automation_run_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_deploy.GetAutomationRunRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_automation_run(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRunRequest( + name="name_value", + ) + + +def test_get_automation_run_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_automation_run in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_automation_run + ] = mock_rpc + request = {} + client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_automation_run(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_automation_run_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.AutomationRun( + name="name_value", + etag="etag_value", + service_account="service_account_value", + target_id="target_id_value", + state=cloud_deploy.AutomationRun.State.SUCCEEDED, + state_description="state_description_value", + rule_id="rule_id_value", + automation_id="automation_id_value", + ) + ) + response = await client.get_automation_run() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRunRequest() + + +@pytest.mark.asyncio +async def test_get_automation_run_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap 
all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_automation_run + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_automation_run + ] = mock_rpc + + request = {} + await client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_automation_run(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_automation_run_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetAutomationRunRequest +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.AutomationRun( + name="name_value", + etag="etag_value", + service_account="service_account_value", + target_id="target_id_value", + state=cloud_deploy.AutomationRun.State.SUCCEEDED, + state_description="state_description_value", + rule_id="rule_id_value", + automation_id="automation_id_value", + ) + ) + response = await client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.GetAutomationRunRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.AutomationRun) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.service_account == "service_account_value" + assert response.target_id == "target_id_value" + assert response.state == cloud_deploy.AutomationRun.State.SUCCEEDED + assert response.state_description == "state_description_value" + assert response.rule_id == "rule_id_value" + assert response.automation_id == "automation_id_value" + + +@pytest.mark.asyncio +async def test_get_automation_run_async_from_dict(): + await test_get_automation_run_async(request_type=dict) + + +def test_get_automation_run_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.GetAutomationRunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + call.return_value = cloud_deploy.AutomationRun() + client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_automation_run_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.GetAutomationRunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.AutomationRun() + ) + await client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_automation_run_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.AutomationRun() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_automation_run( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_automation_run_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_automation_run( + cloud_deploy.GetAutomationRunRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_automation_run_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.AutomationRun() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.AutomationRun() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_automation_run( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_automation_run_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_automation_run( + cloud_deploy.GetAutomationRunRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListAutomationRunsRequest, + dict, + ], +) +def test_list_automation_runs(request_type, transport: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.ListAutomationRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.ListAutomationRunsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAutomationRunsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_automation_runs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_automation_runs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationRunsRequest() + + +def test_list_automation_runs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_deploy.ListAutomationRunsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_automation_runs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationRunsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_automation_runs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_automation_runs in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_automation_runs + ] = mock_rpc + request = {} + client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_automation_runs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_automation_runs_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_automation_runs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationRunsRequest() + + +@pytest.mark.asyncio +async def test_list_automation_runs_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_automation_runs + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_automation_runs + ] = mock_rpc + + request = {} + await client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_automation_runs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_automation_runs_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListAutomationRunsRequest +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.ListAutomationRunsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAutomationRunsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_automation_runs_async_from_dict(): + await test_list_automation_runs_async(request_type=dict) + + +def test_list_automation_runs_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloud_deploy.ListAutomationRunsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + call.return_value = cloud_deploy.ListAutomationRunsResponse() + client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_automation_runs_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.ListAutomationRunsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationRunsResponse() + ) + await client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_automation_runs_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.ListAutomationRunsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_automation_runs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_automation_runs_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_automation_runs( + cloud_deploy.ListAutomationRunsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_automation_runs_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.ListAutomationRunsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationRunsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_automation_runs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_automation_runs_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_automation_runs( + cloud_deploy.ListAutomationRunsRequest(), + parent="parent_value", + ) + + +def test_list_automation_runs_pager(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_automation_runs(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + 
assert all(isinstance(i, cloud_deploy.AutomationRun) for i in results) + + +def test_list_automation_runs_pages(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + ), + RuntimeError, + ) + pages = list(client.list_automation_runs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_automation_runs_async_pager(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_automation_runs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloud_deploy.AutomationRun) for i in responses) + + +@pytest.mark.asyncio +async def test_list_automation_runs_async_pages(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_automation_runs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.CancelAutomationRunRequest, + dict, + ], +) +def test_cancel_automation_run(request_type, transport: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.CancelAutomationRunResponse() + response = client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.CancelAutomationRunRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.CancelAutomationRunResponse) + + +def test_cancel_automation_run_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.cancel_automation_run() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.CancelAutomationRunRequest() + + +def test_cancel_automation_run_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_deploy.CancelAutomationRunRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.cancel_automation_run(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.CancelAutomationRunRequest( + name="name_value", + ) + + +def test_cancel_automation_run_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.cancel_automation_run + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.cancel_automation_run + ] = mock_rpc + request = {} + client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.cancel_automation_run(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_automation_run_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.CancelAutomationRunResponse() + ) + response = await client.cancel_automation_run() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.CancelAutomationRunRequest() + + +@pytest.mark.asyncio +async def test_cancel_automation_run_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.cancel_automation_run + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.cancel_automation_run + ] = mock_rpc + + request = {} + await client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.cancel_automation_run(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_automation_run_async( + transport: str = "grpc_asyncio", + request_type=cloud_deploy.CancelAutomationRunRequest, +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.CancelAutomationRunResponse() + ) + response = await client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.CancelAutomationRunRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.CancelAutomationRunResponse) + + +@pytest.mark.asyncio +async def test_cancel_automation_run_async_from_dict(): + await test_cancel_automation_run_async(request_type=dict) + + +def test_cancel_automation_run_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.CancelAutomationRunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + call.return_value = cloud_deploy.CancelAutomationRunResponse() + client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_automation_run_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.CancelAutomationRunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.CancelAutomationRunResponse() + ) + await client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_cancel_automation_run_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.CancelAutomationRunResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.cancel_automation_run( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_cancel_automation_run_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_automation_run( + cloud_deploy.CancelAutomationRunRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_cancel_automation_run_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.CancelAutomationRunResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.CancelAutomationRunResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.cancel_automation_run( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_cancel_automation_run_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.cancel_automation_run( + cloud_deploy.CancelAutomationRunRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListDeliveryPipelinesRequest, + dict, + ], +) +def test_list_delivery_pipelines_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListDeliveryPipelinesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_delivery_pipelines(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDeliveryPipelinesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_delivery_pipelines_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_delivery_pipelines + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_delivery_pipelines + ] = mock_rpc + + request = {} + client.list_delivery_pipelines(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_delivery_pipelines(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_delivery_pipelines_rest_required_fields( + request_type=cloud_deploy.ListDeliveryPipelinesRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_delivery_pipelines._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_delivery_pipelines._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListDeliveryPipelinesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_delivery_pipelines(request) + response = client.list_delivery_pipelines(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_delivery_pipelines_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_delivery_pipelines._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_delivery_pipelines_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_list_delivery_pipelines" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_list_delivery_pipelines" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.ListDeliveryPipelinesRequest.pb( + cloud_deploy.ListDeliveryPipelinesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.ListDeliveryPipelinesResponse.to_json( + cloud_deploy.ListDeliveryPipelinesResponse() + ) + + request = cloud_deploy.ListDeliveryPipelinesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.ListDeliveryPipelinesResponse() + + client.list_delivery_pipelines( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_delivery_pipelines_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListDeliveryPipelinesRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_delivery_pipelines(request) + + +def test_list_delivery_pipelines_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListDeliveryPipelinesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_delivery_pipelines(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deliveryPipelines" + % client.transport._host, + args[1], + ) + + +def test_list_delivery_pipelines_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_delivery_pipelines( + cloud_deploy.ListDeliveryPipelinesRequest(), + parent="parent_value", + ) + + +def test_list_delivery_pipelines_rest_pager(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[ + cloud_deploy.DeliveryPipeline(), + cloud_deploy.DeliveryPipeline(), + cloud_deploy.DeliveryPipeline(), + ], + next_page_token="abc", + ), + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[], + next_page_token="def", + ), + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[ + cloud_deploy.DeliveryPipeline(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[ + cloud_deploy.DeliveryPipeline(), + cloud_deploy.DeliveryPipeline(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloud_deploy.ListDeliveryPipelinesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_delivery_pipelines(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.DeliveryPipeline) for i in results) + + pages = list(client.list_delivery_pipelines(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetDeliveryPipelineRequest, + dict, + ], +) +def test_get_delivery_pipeline_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": 
"projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.DeliveryPipeline( + name="name_value", + uid="uid_value", + description="description_value", + etag="etag_value", + suspended=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_delivery_pipeline(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.DeliveryPipeline) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.suspended is True + + +def test_get_delivery_pipeline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_delivery_pipeline + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # 
operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_delivery_pipeline + ] = mock_rpc + + request = {} + client.get_delivery_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_delivery_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.GetDeliveryPipelineRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.DeliveryPipeline() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_delivery_pipeline(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_delivery_pipeline_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_delivery_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_delivery_pipeline_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( 
+ transports.CloudDeployRestInterceptor, "post_get_delivery_pipeline" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_get_delivery_pipeline" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.GetDeliveryPipelineRequest.pb( + cloud_deploy.GetDeliveryPipelineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.DeliveryPipeline.to_json( + cloud_deploy.DeliveryPipeline() + ) + + request = cloud_deploy.GetDeliveryPipelineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.DeliveryPipeline() + + client.get_delivery_pipeline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetDeliveryPipelineRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_delivery_pipeline(request) + + +def test_get_delivery_pipeline_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.DeliveryPipeline() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_delivery_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}" + % client.transport._host, + args[1], + ) + + +def test_get_delivery_pipeline_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_delivery_pipeline( + cloud_deploy.GetDeliveryPipelineRequest(), + name="name_value", + ) + + +def test_get_delivery_pipeline_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.CreateDeliveryPipelineRequest, + dict, + ], +) +def test_create_delivery_pipeline_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["delivery_pipeline"] = { + "name": "name_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "serial_pipeline": { + "stages": [ + { + "target_id": "target_id_value", + "profiles": ["profiles_value1", "profiles_value2"], + "strategy": { + "standard": { + "verify": True, + "predeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + "postdeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + }, + "canary": { + "runtime_config": { + "kubernetes": { + "gateway_service_mesh": { + "http_route": "http_route_value", + "service": "service_value", + "deployment": "deployment_value", + "route_update_wait_time": { + "seconds": 751, + "nanos": 543, + }, + 
"stable_cutback_duration": {}, + "pod_selector_label": "pod_selector_label_value", + }, + "service_networking": { + "service": "service_value", + "deployment": "deployment_value", + "disable_pod_overprovisioning": True, + "pod_selector_label": "pod_selector_label_value", + }, + }, + "cloud_run": { + "automatic_traffic_control": True, + "canary_revision_tags": [ + "canary_revision_tags_value1", + "canary_revision_tags_value2", + ], + "prior_revision_tags": [ + "prior_revision_tags_value1", + "prior_revision_tags_value2", + ], + "stable_revision_tags": [ + "stable_revision_tags_value1", + "stable_revision_tags_value2", + ], + }, + }, + "canary_deployment": { + "percentages": [1170, 1171], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + }, + "custom_canary_deployment": { + "phase_configs": [ + { + "phase_id": "phase_id_value", + "percentage": 1054, + "profiles": [ + "profiles_value1", + "profiles_value2", + ], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + } + ] + }, + }, + }, + "deploy_parameters": [{"values": {}, "match_target_labels": {}}], + } + ] + }, + "condition": { + "pipeline_ready_condition": {"status": True, "update_time": {}}, + "targets_present_condition": { + "status": True, + "missing_targets": ["missing_targets_value1", "missing_targets_value2"], + "update_time": {}, + }, + "targets_type_condition": { + "status": True, + "error_details": "error_details_value", + }, + }, + "etag": "etag_value", + "suspended": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.CreateDeliveryPipelineRequest.meta.fields[ + "delivery_pipeline" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["delivery_pipeline"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the 
runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["delivery_pipeline"][field])): + del request_init["delivery_pipeline"][field][i][subfield] + else: + del request_init["delivery_pipeline"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_delivery_pipeline(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_delivery_pipeline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_delivery_pipeline + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_delivery_pipeline + ] = mock_rpc + + request = {} + client.create_delivery_pipeline(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_delivery_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.CreateDeliveryPipelineRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["delivery_pipeline_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "deliveryPipelineId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "deliveryPipelineId" in jsonified_request + assert ( + jsonified_request["deliveryPipelineId"] == request_init["delivery_pipeline_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["deliveryPipelineId"] = "delivery_pipeline_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_delivery_pipeline._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "delivery_pipeline_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "deliveryPipelineId" in jsonified_request + assert jsonified_request["deliveryPipelineId"] == "delivery_pipeline_id_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_delivery_pipeline(request) + + expected_params = [ + ( + "deliveryPipelineId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_delivery_pipeline_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_delivery_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "deliveryPipelineId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "deliveryPipelineId", + "deliveryPipeline", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_delivery_pipeline_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_create_delivery_pipeline" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_create_delivery_pipeline" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.CreateDeliveryPipelineRequest.pb( + cloud_deploy.CreateDeliveryPipelineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_deploy.CreateDeliveryPipelineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_delivery_pipeline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateDeliveryPipelineRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_delivery_pipeline(request) + + +def test_create_delivery_pipeline_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + delivery_pipeline_id="delivery_pipeline_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_delivery_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deliveryPipelines" + % client.transport._host, + args[1], + ) + + +def test_create_delivery_pipeline_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_delivery_pipeline( + cloud_deploy.CreateDeliveryPipelineRequest(), + parent="parent_value", + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + delivery_pipeline_id="delivery_pipeline_id_value", + ) + + +def test_create_delivery_pipeline_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.UpdateDeliveryPipelineRequest, + dict, + ], +) +def test_update_delivery_pipeline_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "delivery_pipeline": { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + } + request_init["delivery_pipeline"] = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "serial_pipeline": { + "stages": [ + { + "target_id": "target_id_value", + "profiles": ["profiles_value1", "profiles_value2"], + "strategy": { + "standard": { + "verify": True, + "predeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + "postdeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + }, + "canary": { + "runtime_config": { + "kubernetes": { + "gateway_service_mesh": { + "http_route": "http_route_value", + "service": "service_value", + "deployment": "deployment_value", + "route_update_wait_time": { + "seconds": 751, + "nanos": 543, + }, + "stable_cutback_duration": {}, + "pod_selector_label": "pod_selector_label_value", + }, + "service_networking": { + "service": "service_value", + "deployment": "deployment_value", + "disable_pod_overprovisioning": True, + "pod_selector_label": 
"pod_selector_label_value", + }, + }, + "cloud_run": { + "automatic_traffic_control": True, + "canary_revision_tags": [ + "canary_revision_tags_value1", + "canary_revision_tags_value2", + ], + "prior_revision_tags": [ + "prior_revision_tags_value1", + "prior_revision_tags_value2", + ], + "stable_revision_tags": [ + "stable_revision_tags_value1", + "stable_revision_tags_value2", + ], + }, + }, + "canary_deployment": { + "percentages": [1170, 1171], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + }, + "custom_canary_deployment": { + "phase_configs": [ + { + "phase_id": "phase_id_value", + "percentage": 1054, + "profiles": [ + "profiles_value1", + "profiles_value2", + ], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + } + ] + }, + }, + }, + "deploy_parameters": [{"values": {}, "match_target_labels": {}}], + } + ] + }, + "condition": { + "pipeline_ready_condition": {"status": True, "update_time": {}}, + "targets_present_condition": { + "status": True, + "missing_targets": ["missing_targets_value1", "missing_targets_value2"], + "update_time": {}, + }, + "targets_type_condition": { + "status": True, + "error_details": "error_details_value", + }, + }, + "etag": "etag_value", + "suspended": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.UpdateDeliveryPipelineRequest.meta.fields[ + "delivery_pipeline" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["delivery_pipeline"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["delivery_pipeline"][field])): + del request_init["delivery_pipeline"][field][i][subfield] + 
else: + del request_init["delivery_pipeline"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_delivery_pipeline(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_delivery_pipeline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_delivery_pipeline + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_delivery_pipeline + ] = mock_rpc + + request = {} + client.update_delivery_pipeline(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_delivery_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.UpdateDeliveryPipelineRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_delivery_pipeline._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_delivery_pipeline(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_delivery_pipeline_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_delivery_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "deliveryPipeline", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_delivery_pipeline_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_update_delivery_pipeline" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_update_delivery_pipeline" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.UpdateDeliveryPipelineRequest.pb( + cloud_deploy.UpdateDeliveryPipelineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_deploy.UpdateDeliveryPipelineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_delivery_pipeline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.UpdateDeliveryPipelineRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "delivery_pipeline": { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_delivery_pipeline(request) + + +def test_update_delivery_pipeline_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "delivery_pipeline": { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_delivery_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{delivery_pipeline.name=projects/*/locations/*/deliveryPipelines/*}" + % client.transport._host, + args[1], + ) + + +def test_update_delivery_pipeline_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_delivery_pipeline( + cloud_deploy.UpdateDeliveryPipelineRequest(), + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_delivery_pipeline_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.DeleteDeliveryPipelineRequest, + dict, + ], +) +def test_delete_delivery_pipeline_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_delivery_pipeline(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_delivery_pipeline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_delivery_pipeline + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_delivery_pipeline + ] = mock_rpc + + request = {} + client.delete_delivery_pipeline(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_delivery_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.DeleteDeliveryPipelineRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_delivery_pipeline._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "force", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_delivery_pipeline(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_delivery_pipelines_rest_unset_required_fields(): +def test_delete_delivery_pipeline_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_delivery_pipelines._get_unset_required_fields({}) + unset_fields = transport.delete_delivery_pipeline._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "allowMissing", + "etag", + "force", + "requestId", + "validateOnly", ) ) - & set(("parent",)) + & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_delivery_pipelines_rest_interceptors(null_interceptor): +def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): transport = 
transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18440,14 +22386,16 @@ def test_list_delivery_pipelines_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_list_delivery_pipelines" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_delivery_pipeline" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_list_delivery_pipelines" + transports.CloudDeployRestInterceptor, "pre_delete_delivery_pipeline" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.ListDeliveryPipelinesRequest.pb( - cloud_deploy.ListDeliveryPipelinesRequest() + pb_message = cloud_deploy.DeleteDeliveryPipelineRequest.pb( + cloud_deploy.DeleteDeliveryPipelineRequest() ) transcode.return_value = { "method": "post", @@ -18459,19 +22407,19 @@ def test_list_delivery_pipelines_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.ListDeliveryPipelinesResponse.to_json( - cloud_deploy.ListDeliveryPipelinesResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_deploy.ListDeliveryPipelinesRequest() + request = cloud_deploy.DeleteDeliveryPipelineRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.ListDeliveryPipelinesResponse() + post.return_value = operations_pb2.Operation() - client.list_delivery_pipelines( + client.delete_delivery_pipeline( request, metadata=[ ("key", "val"), @@ -18483,8 +22431,8 @@ def test_list_delivery_pipelines_rest_interceptors(null_interceptor): post.assert_called_once() -def 
test_list_delivery_pipelines_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.ListDeliveryPipelinesRequest +def test_delete_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.DeleteDeliveryPipelineRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18492,7 +22440,9 @@ def test_list_delivery_pipelines_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18504,10 +22454,10 @@ def test_list_delivery_pipelines_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_delivery_pipelines(request) + client.delete_delivery_pipeline(request) -def test_list_delivery_pipelines_rest_flattened(): +def test_delete_delivery_pipeline_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18516,40 +22466,40 @@ def test_list_delivery_pipelines_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.ListDeliveryPipelinesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_delivery_pipelines(**mock_args) + client.delete_delivery_pipeline(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deliveryPipelines" + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}" % client.transport._host, args[1], ) -def test_list_delivery_pipelines_rest_flattened_error(transport: str = "rest"): +def test_delete_delivery_pipeline_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18558,126 +22508,61 @@ def test_list_delivery_pipelines_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_delivery_pipelines( - cloud_deploy.ListDeliveryPipelinesRequest(), - parent="parent_value", + client.delete_delivery_pipeline( + cloud_deploy.DeleteDeliveryPipelineRequest(), + name="name_value", ) -def test_list_delivery_pipelines_rest_pager(transport: str = "rest"): +def test_delete_delivery_pipeline_rest_error(): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_deploy.ListDeliveryPipelinesResponse( - delivery_pipelines=[ - cloud_deploy.DeliveryPipeline(), - cloud_deploy.DeliveryPipeline(), - cloud_deploy.DeliveryPipeline(), - ], - next_page_token="abc", - ), - cloud_deploy.ListDeliveryPipelinesResponse( - delivery_pipelines=[], - next_page_token="def", - ), - cloud_deploy.ListDeliveryPipelinesResponse( - delivery_pipelines=[ - cloud_deploy.DeliveryPipeline(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListDeliveryPipelinesResponse( - delivery_pipelines=[ - cloud_deploy.DeliveryPipeline(), - cloud_deploy.DeliveryPipeline(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - cloud_deploy.ListDeliveryPipelinesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = 
client.list_delivery_pipelines(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.DeliveryPipeline) for i in results) - - pages = list(client.list_delivery_pipelines(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetDeliveryPipelineRequest, + cloud_deploy.ListTargetsRequest, dict, ], ) -def test_get_delivery_pipeline_rest(request_type): +def test_list_targets_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.DeliveryPipeline( - name="name_value", - uid="uid_value", - description="description_value", - etag="etag_value", - suspended=True, + return_value = cloud_deploy.ListTargetsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_delivery_pipeline(request) + response = client.list_targets(request) # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.DeliveryPipeline) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.etag == "etag_value" - assert response.suspended is True + assert isinstance(response, pagers.ListTargetsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_delivery_pipeline_rest_use_cached_wrapped_rpc(): +def test_list_targets_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18691,40 +22576,35 @@ def test_get_delivery_pipeline_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_delivery_pipeline - in client._transport._wrapped_methods - ) + assert client._transport.list_targets in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = 
mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_delivery_pipeline - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_targets] = mock_rpc request = {} - client.get_delivery_pipeline(request) + client.list_targets(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_delivery_pipeline(request) + client.list_targets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_delivery_pipeline_rest_required_fields( - request_type=cloud_deploy.GetDeliveryPipelineRequest, +def test_list_targets_rest_required_fields( + request_type=cloud_deploy.ListTargetsRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18735,21 +22615,30 @@ def test_get_delivery_pipeline_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).list_targets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).list_targets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18758,7 +22647,7 @@ def test_get_delivery_pipeline_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.DeliveryPipeline() + return_value = cloud_deploy.ListTargetsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18779,30 +22668,40 @@ def test_get_delivery_pipeline_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_delivery_pipeline(request) + response = client.list_targets(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_delivery_pipeline_rest_unset_required_fields(): +def test_list_targets_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_delivery_pipeline._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = 
transport.list_targets._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_delivery_pipeline_rest_interceptors(null_interceptor): +def test_list_targets_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18815,14 +22714,14 @@ def test_get_delivery_pipeline_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_get_delivery_pipeline" + transports.CloudDeployRestInterceptor, "post_list_targets" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_get_delivery_pipeline" + transports.CloudDeployRestInterceptor, "pre_list_targets" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.GetDeliveryPipelineRequest.pb( - cloud_deploy.GetDeliveryPipelineRequest() + pb_message = cloud_deploy.ListTargetsRequest.pb( + cloud_deploy.ListTargetsRequest() ) transcode.return_value = { "method": "post", @@ -18834,19 +22733,19 @@ def test_get_delivery_pipeline_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.DeliveryPipeline.to_json( - cloud_deploy.DeliveryPipeline() + req.return_value._content = cloud_deploy.ListTargetsResponse.to_json( + cloud_deploy.ListTargetsResponse() ) - request = cloud_deploy.GetDeliveryPipelineRequest() + request = cloud_deploy.ListTargetsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.DeliveryPipeline() + post.return_value = cloud_deploy.ListTargetsResponse() - 
client.get_delivery_pipeline( + client.list_targets( request, metadata=[ ("key", "val"), @@ -18858,8 +22757,8 @@ def test_get_delivery_pipeline_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_delivery_pipeline_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.GetDeliveryPipelineRequest +def test_list_targets_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListTargetsRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18867,9 +22766,7 @@ def test_get_delivery_pipeline_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18881,10 +22778,10 @@ def test_get_delivery_pipeline_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_delivery_pipeline(request) + client.list_targets(request) -def test_get_delivery_pipeline_rest_flattened(): +def test_list_targets_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18893,16 +22790,14 @@ def test_get_delivery_pipeline_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.DeliveryPipeline() + return_value = cloud_deploy.ListTargetsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -18910,25 +22805,24 @@ def test_get_delivery_pipeline_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_delivery_pipeline(**mock_args) + client.list_targets(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/targets" % client.transport._host, args[1], ) -def test_get_delivery_pipeline_rest_flattened_error(transport: str = "rest"): +def test_list_targets_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18937,225 +22831,113 @@ def test_get_delivery_pipeline_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_delivery_pipeline( - cloud_deploy.GetDeliveryPipelineRequest(), - name="name_value", + client.list_targets( + cloud_deploy.ListTargetsRequest(), + parent="parent_value", + ) + + +def test_list_targets_rest_pager(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListTargetsResponse( + targets=[ + cloud_deploy.Target(), + cloud_deploy.Target(), + cloud_deploy.Target(), + ], + next_page_token="abc", + ), + cloud_deploy.ListTargetsResponse( + targets=[], + next_page_token="def", + ), + cloud_deploy.ListTargetsResponse( + targets=[ + cloud_deploy.Target(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListTargetsResponse( + targets=[ + cloud_deploy.Target(), + cloud_deploy.Target(), + ], + ), ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloud_deploy.ListTargetsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + sample_request = {"parent": "projects/sample1/locations/sample2"} -def test_get_delivery_pipeline_rest_error(): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + pager = client.list_targets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.Target) for i in results) + + 
pages = list(client.list_targets(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateDeliveryPipelineRequest, + cloud_deploy.RollbackTargetRequest, dict, ], ) -def test_create_delivery_pipeline_rest(request_type): +def test_rollback_target_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["delivery_pipeline"] = { - "name": "name_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "serial_pipeline": { - "stages": [ - { - "target_id": "target_id_value", - "profiles": ["profiles_value1", "profiles_value2"], - "strategy": { - "standard": { - "verify": True, - "predeploy": { - "actions": ["actions_value1", "actions_value2"] - }, - "postdeploy": { - "actions": ["actions_value1", "actions_value2"] - }, - }, - "canary": { - "runtime_config": { - "kubernetes": { - "gateway_service_mesh": { - "http_route": "http_route_value", - "service": "service_value", - "deployment": "deployment_value", - "route_update_wait_time": { - "seconds": 751, - "nanos": 543, - }, - "stable_cutback_duration": {}, - "pod_selector_label": "pod_selector_label_value", - }, - "service_networking": { - "service": "service_value", - "deployment": "deployment_value", - "disable_pod_overprovisioning": True, - "pod_selector_label": "pod_selector_label_value", - }, - }, - "cloud_run": { - "automatic_traffic_control": True, - "canary_revision_tags": [ - "canary_revision_tags_value1", - "canary_revision_tags_value2", - ], - "prior_revision_tags": [ - "prior_revision_tags_value1", - "prior_revision_tags_value2", - ], - "stable_revision_tags": [ 
- "stable_revision_tags_value1", - "stable_revision_tags_value2", - ], - }, - }, - "canary_deployment": { - "percentages": [1170, 1171], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - }, - "custom_canary_deployment": { - "phase_configs": [ - { - "phase_id": "phase_id_value", - "percentage": 1054, - "profiles": [ - "profiles_value1", - "profiles_value2", - ], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - } - ] - }, - }, - }, - "deploy_parameters": [{"values": {}, "match_target_labels": {}}], - } - ] - }, - "condition": { - "pipeline_ready_condition": {"status": True, "update_time": {}}, - "targets_present_condition": { - "status": True, - "missing_targets": ["missing_targets_value1", "missing_targets_value2"], - "update_time": {}, - }, - "targets_type_condition": { - "status": True, - "error_details": "error_details_value", - }, - }, - "etag": "etag_value", - "suspended": True, + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.CreateDeliveryPipelineRequest.meta.fields[ - "delivery_pipeline" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["delivery_pipeline"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["delivery_pipeline"][field])): - del request_init["delivery_pipeline"][field][i][subfield] - 
else: - del request_init["delivery_pipeline"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.RollbackTargetResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_delivery_pipeline(request) + response = client.rollback_target(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_deploy.RollbackTargetResponse) -def test_create_delivery_pipeline_rest_use_cached_wrapped_rpc(): +def test_rollback_target_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19169,45 +22951,37 @@ def test_create_delivery_pipeline_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_delivery_pipeline - in client._transport._wrapped_methods - ) + assert client._transport.rollback_target in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.create_delivery_pipeline - ] = mock_rpc + client._transport._wrapped_methods[client._transport.rollback_target] = mock_rpc request = {} - client.create_delivery_pipeline(request) + client.rollback_target(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_delivery_pipeline(request) + client.rollback_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_delivery_pipeline_rest_required_fields( - request_type=cloud_deploy.CreateDeliveryPipelineRequest, +def test_rollback_target_rest_required_fields( + request_type=cloud_deploy.RollbackTargetRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" - request_init["delivery_pipeline_id"] = "" + request_init["name"] = "" + request_init["target_id"] = "" + request_init["rollout_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19215,40 +22989,30 @@ def test_create_delivery_pipeline_rest_required_fields( ) # verify fields with default values are dropped - assert "deliveryPipelineId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).rollback_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "deliveryPipelineId" in jsonified_request - assert ( - jsonified_request["deliveryPipelineId"] == request_init["delivery_pipeline_id"] - ) - jsonified_request["parent"] = "parent_value" - 
jsonified_request["deliveryPipelineId"] = "delivery_pipeline_id_value" + jsonified_request["name"] = "name_value" + jsonified_request["targetId"] = "target_id_value" + jsonified_request["rolloutId"] = "rollout_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_delivery_pipeline._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "delivery_pipeline_id", - "request_id", - "validate_only", - ) - ) + ).rollback_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "deliveryPipelineId" in jsonified_request - assert jsonified_request["deliveryPipelineId"] == "delivery_pipeline_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "targetId" in jsonified_request + assert jsonified_request["targetId"] == "target_id_value" + assert "rolloutId" in jsonified_request + assert jsonified_request["rolloutId"] == "rollout_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19257,7 +23021,7 @@ def test_create_delivery_pipeline_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.RollbackTargetResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19277,50 +23041,41 @@ def test_create_delivery_pipeline_rest_required_fields( response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_delivery_pipeline(request) + response = client.rollback_target(request) - expected_params = [ - ( - "deliveryPipelineId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_delivery_pipeline_rest_unset_required_fields(): +def test_rollback_target_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_delivery_pipeline._get_unset_required_fields({}) + unset_fields = transport.rollback_target._get_unset_required_fields({}) assert set(unset_fields) == ( - set( - ( - "deliveryPipelineId", - "requestId", - "validateOnly", - ) - ) + set(()) & set( ( - "parent", - "deliveryPipelineId", - "deliveryPipeline", + "name", + "targetId", + "rolloutId", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_delivery_pipeline_rest_interceptors(null_interceptor): +def test_rollback_target_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19333,16 +23088,14 @@ def test_create_delivery_pipeline_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, 
"_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_create_delivery_pipeline" + transports.CloudDeployRestInterceptor, "post_rollback_target" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_create_delivery_pipeline" + transports.CloudDeployRestInterceptor, "pre_rollback_target" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.CreateDeliveryPipelineRequest.pb( - cloud_deploy.CreateDeliveryPipelineRequest() + pb_message = cloud_deploy.RollbackTargetRequest.pb( + cloud_deploy.RollbackTargetRequest() ) transcode.return_value = { "method": "post", @@ -19354,19 +23107,19 @@ def test_create_delivery_pipeline_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = cloud_deploy.RollbackTargetResponse.to_json( + cloud_deploy.RollbackTargetResponse() ) - request = cloud_deploy.CreateDeliveryPipelineRequest() + request = cloud_deploy.RollbackTargetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.RollbackTargetResponse() - client.create_delivery_pipeline( + client.rollback_target( request, metadata=[ ("key", "val"), @@ -19378,8 +23131,8 @@ def test_create_delivery_pipeline_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_delivery_pipeline_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.CreateDeliveryPipelineRequest +def test_rollback_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.RollbackTargetRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19387,7 +23140,9 @@ def 
test_create_delivery_pipeline_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19399,285 +23154,129 @@ def test_create_delivery_pipeline_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_delivery_pipeline(request) - - -def test_create_delivery_pipeline_rest_flattened(): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), - delivery_pipeline_id="delivery_pipeline_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_delivery_pipeline(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deliveryPipelines" - % client.transport._host, - args[1], - ) - - -def test_create_delivery_pipeline_rest_flattened_error(transport: str = "rest"): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_delivery_pipeline( - cloud_deploy.CreateDeliveryPipelineRequest(), - parent="parent_value", - delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), - delivery_pipeline_id="delivery_pipeline_id_value", - ) - - -def test_create_delivery_pipeline_rest_error(): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + client.rollback_target(request) -@pytest.mark.parametrize( - "request_type", - [ - cloud_deploy.UpdateDeliveryPipelineRequest, - dict, - ], -) -def test_update_delivery_pipeline_rest(request_type): +def test_rollback_target_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "delivery_pipeline": { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - } - request_init["delivery_pipeline"] = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "serial_pipeline": { - "stages": [ - { - "target_id": "target_id_value", - "profiles": ["profiles_value1", "profiles_value2"], - "strategy": { - "standard": { - "verify": True, - "predeploy": { - "actions": ["actions_value1", "actions_value2"] - }, - "postdeploy": { - 
"actions": ["actions_value1", "actions_value2"] - }, - }, - "canary": { - "runtime_config": { - "kubernetes": { - "gateway_service_mesh": { - "http_route": "http_route_value", - "service": "service_value", - "deployment": "deployment_value", - "route_update_wait_time": { - "seconds": 751, - "nanos": 543, - }, - "stable_cutback_duration": {}, - "pod_selector_label": "pod_selector_label_value", - }, - "service_networking": { - "service": "service_value", - "deployment": "deployment_value", - "disable_pod_overprovisioning": True, - "pod_selector_label": "pod_selector_label_value", - }, - }, - "cloud_run": { - "automatic_traffic_control": True, - "canary_revision_tags": [ - "canary_revision_tags_value1", - "canary_revision_tags_value2", - ], - "prior_revision_tags": [ - "prior_revision_tags_value1", - "prior_revision_tags_value2", - ], - "stable_revision_tags": [ - "stable_revision_tags_value1", - "stable_revision_tags_value2", - ], - }, - }, - "canary_deployment": { - "percentages": [1170, 1171], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - }, - "custom_canary_deployment": { - "phase_configs": [ - { - "phase_id": "phase_id_value", - "percentage": 1054, - "profiles": [ - "profiles_value1", - "profiles_value2", - ], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - } - ] - }, - }, - }, - "deploy_parameters": [{"values": {}, "match_target_labels": {}}], - } - ] - }, - "condition": { - "pipeline_ready_condition": {"status": True, "update_time": {}}, - "targets_present_condition": { - "status": True, - "missing_targets": ["missing_targets_value1", "missing_targets_value2"], - "update_time": {}, - }, - "targets_type_condition": { - "status": True, - "error_details": "error_details_value", - }, - }, - "etag": "etag_value", - "suspended": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + ) - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.UpdateDeliveryPipelineRequest.meta.fields[ - "delivery_pipeline" - ] + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.RollbackTargetResponse() - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + target_id="target_id_value", + rollout_id="rollout_id_value", + ) + mock_args.update(sample_request) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - runtime_nested_fields = [ - (field.name, nested_field.name) - for 
field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + client.rollback_target(**mock_args) - subfields_not_in_runtime = [] + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}:rollbackTarget" + % client.transport._host, + args[1], + ) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["delivery_pipeline"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +def test_rollback_target_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["delivery_pipeline"][field])): - del 
request_init["delivery_pipeline"][field][i][subfield] - else: - del request_init["delivery_pipeline"][field][subfield] + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rollback_target( + cloud_deploy.RollbackTargetRequest(), + name="name_value", + target_id="target_id_value", + rollout_id="rollout_id_value", + ) + + +def test_rollback_target_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetTargetRequest, + dict, + ], +) +def test_get_target_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Target( + name="name_value", + target_id="target_id_value", + uid="uid_value", + description="description_value", + require_approval=True, + etag="etag_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.Target.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_delivery_pipeline(request) + response = client.get_target(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_deploy.Target) + assert response.name == "name_value" + assert response.target_id == "target_id_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.require_approval is True + assert response.etag == "etag_value" -def test_update_delivery_pipeline_rest_use_cached_wrapped_rpc(): +def test_get_target_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19691,43 +23290,33 @@ def test_update_delivery_pipeline_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_delivery_pipeline - in client._transport._wrapped_methods - ) + assert client._transport.get_target in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_delivery_pipeline - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_target] = mock_rpc request = {} - client.update_delivery_pipeline(request) + client.get_target(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_delivery_pipeline(request) + client.get_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_delivery_pipeline_rest_required_fields( - request_type=cloud_deploy.UpdateDeliveryPipelineRequest, -): +def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequest): transport_class = transports.CloudDeployRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19738,26 +23327,21 @@ def test_update_delivery_pipeline_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).get_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_delivery_pipeline._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "allow_missing", - "request_id", - "update_mask", - "validate_only", - ) - ) + ).get_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19766,7 +23350,7 @@ def test_update_delivery_pipeline_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Target() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19778,52 +23362,39 @@ def test_update_delivery_pipeline_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.Target.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_delivery_pipeline(request) + response = client.get_target(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_delivery_pipeline_rest_unset_required_fields(): +def test_get_target_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials 
) - unset_fields = transport.update_delivery_pipeline._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "allowMissing", - "requestId", - "updateMask", - "validateOnly", - ) - ) - & set( - ( - "updateMask", - "deliveryPipeline", - ) - ) - ) + unset_fields = transport.get_target._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_delivery_pipeline_rest_interceptors(null_interceptor): +def test_get_target_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19836,17 +23407,13 @@ def test_update_delivery_pipeline_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_update_delivery_pipeline" + transports.CloudDeployRestInterceptor, "post_get_target" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_update_delivery_pipeline" + transports.CloudDeployRestInterceptor, "pre_get_target" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.UpdateDeliveryPipelineRequest.pb( - cloud_deploy.UpdateDeliveryPipelineRequest() - ) + pb_message = cloud_deploy.GetTargetRequest.pb(cloud_deploy.GetTargetRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -19857,19 +23424,17 @@ def test_update_delivery_pipeline_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = cloud_deploy.Target.to_json(cloud_deploy.Target()) - request = 
cloud_deploy.UpdateDeliveryPipelineRequest() + request = cloud_deploy.GetTargetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.Target() - client.update_delivery_pipeline( + client.get_target( request, metadata=[ ("key", "val"), @@ -19881,8 +23446,8 @@ def test_update_delivery_pipeline_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_delivery_pipeline_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.UpdateDeliveryPipelineRequest +def test_get_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetTargetRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19890,11 +23455,7 @@ def test_update_delivery_pipeline_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "delivery_pipeline": { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - } + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19906,10 +23467,10 @@ def test_update_delivery_pipeline_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_delivery_pipeline(request) + client.get_target(request) -def test_update_delivery_pipeline_rest_flattened(): +def test_get_target_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19918,43 +23479,39 @@ def test_update_delivery_pipeline_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Target() # get arguments that satisfy an http rule for this method - sample_request = { - "delivery_pipeline": { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - } + sample_request = {"name": "projects/sample1/locations/sample2/targets/sample3"} # get truthy value for each flattened field mock_args = dict( - delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.Target.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_delivery_pipeline(**mock_args) + client.get_target(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{delivery_pipeline.name=projects/*/locations/*/deliveryPipelines/*}" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/targets/*}" % client.transport._host, args[1], ) -def test_update_delivery_pipeline_rest_flattened_error(transport: str = "rest"): +def test_get_target_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19963,14 +23520,13 @@ def test_update_delivery_pipeline_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_delivery_pipeline( - cloud_deploy.UpdateDeliveryPipelineRequest(), - delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_target( + cloud_deploy.GetTargetRequest(), + name="name_value", ) -def test_update_delivery_pipeline_rest_error(): +def test_get_target_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -19979,20 +23535,126 @@ def test_update_delivery_pipeline_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.DeleteDeliveryPipelineRequest, + cloud_deploy.CreateTargetRequest, dict, ], ) -def test_delete_delivery_pipeline_rest(request_type): +def test_create_target_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["target"] = { + "name": "name_value", + "target_id": "target_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "require_approval": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "gke": { + "cluster": "cluster_value", + "internal_ip": True, + "proxy_url": "proxy_url_value", + }, + "anthos_cluster": {"membership": "membership_value"}, + "run": {"location": "location_value"}, + "multi_target": {"target_ids": ["target_ids_value1", "target_ids_value2"]}, + "custom_target": {"custom_target_type": "custom_target_type_value"}, + "etag": "etag_value", + "execution_configs": [ + { + "usages": [1], + "default_pool": { + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "private_pool": { + "worker_pool": 
"worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + "verbose": True, + } + ], + "deploy_parameters": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.CreateTargetRequest.meta.fields["target"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["target"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + 
result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["target"][field])): + del request_init["target"][field][i][subfield] + else: + del request_init["target"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -20007,13 +23669,13 @@ def test_delete_delivery_pipeline_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_delivery_pipeline(request) + response = client.create_target(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_delete_delivery_pipeline_rest_use_cached_wrapped_rpc(): +def test_create_target_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20027,22 +23689,17 @@ def test_delete_delivery_pipeline_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_delivery_pipeline - in client._transport._wrapped_methods - ) + assert client._transport.create_target in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_delivery_pipeline - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_target] = mock_rpc request = {} - client.delete_delivery_pipeline(request) + client.create_target(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -20051,20 +23708,21 @@ def test_delete_delivery_pipeline_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_delivery_pipeline(request) + client.create_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_delivery_pipeline_rest_required_fields( - request_type=cloud_deploy.DeleteDeliveryPipelineRequest, +def test_create_target_rest_required_fields( + request_type=cloud_deploy.CreateTargetRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["target_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20072,34 +23730,38 @@ def test_delete_delivery_pipeline_rest_required_fields( ) # verify fields with default values are dropped + assert "targetId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).create_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "targetId" in jsonified_request + assert jsonified_request["targetId"] == request_init["target_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["targetId"] = "target_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).create_target._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", - "etag", - "force", "request_id", + "target_id", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "targetId" in jsonified_request + assert jsonified_request["targetId"] == "target_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20120,9 +23782,10 @@ def test_delete_delivery_pipeline_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -20132,35 +23795,45 @@ def test_delete_delivery_pipeline_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_delivery_pipeline(request) + response = client.create_target(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "targetId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_delivery_pipeline_rest_unset_required_fields(): +def test_create_target_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_delivery_pipeline._get_unset_required_fields({}) + unset_fields = transport.create_target._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", - "etag", - "force", "requestId", + "targetId", "validateOnly", ) ) - & set(("name",)) + & set( + ( + "parent", + 
"targetId", + "target", + ) + ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): +def test_create_target_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20175,14 +23848,14 @@ def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_delete_delivery_pipeline" + transports.CloudDeployRestInterceptor, "post_create_target" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_delete_delivery_pipeline" + transports.CloudDeployRestInterceptor, "pre_create_target" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.DeleteDeliveryPipelineRequest.pb( - cloud_deploy.DeleteDeliveryPipelineRequest() + pb_message = cloud_deploy.CreateTargetRequest.pb( + cloud_deploy.CreateTargetRequest() ) transcode.return_value = { "method": "post", @@ -20198,7 +23871,7 @@ def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_deploy.DeleteDeliveryPipelineRequest() + request = cloud_deploy.CreateTargetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -20206,7 +23879,7 @@ def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_delivery_pipeline( + client.create_target( request, metadata=[ ("key", "val"), @@ -20218,8 +23891,8 @@ def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_delivery_pipeline_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.DeleteDeliveryPipelineRequest +def 
test_create_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateTargetRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20227,9 +23900,7 @@ def test_delete_delivery_pipeline_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20241,10 +23912,10 @@ def test_delete_delivery_pipeline_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_delivery_pipeline(request) + client.create_target(request) -def test_delete_delivery_pipeline_rest_flattened(): +def test_create_target_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20256,13 +23927,13 @@ def test_delete_delivery_pipeline_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + target=cloud_deploy.Target(name="name_value"), + target_id="target_id_value", ) mock_args.update(sample_request) @@ -20273,20 +23944,19 @@ def test_delete_delivery_pipeline_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_delivery_pipeline(**mock_args) + client.create_target(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}" - % client.transport._host, + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/targets" % client.transport._host, args[1], ) -def test_delete_delivery_pipeline_rest_flattened_error(transport: str = "rest"): +def test_create_target_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20295,13 +23965,15 @@ def test_delete_delivery_pipeline_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_delivery_pipeline( - cloud_deploy.DeleteDeliveryPipelineRequest(), - name="name_value", + client.create_target( + cloud_deploy.CreateTargetRequest(), + parent="parent_value", + target=cloud_deploy.Target(name="name_value"), + target_id="target_id_value", ) -def test_delete_delivery_pipeline_rest_error(): +def test_create_target_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20310,46 +23982,149 @@ def test_delete_delivery_pipeline_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListTargetsRequest, + cloud_deploy.UpdateTargetRequest, dict, ], ) -def test_list_targets_rest(request_type): +def test_update_target_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} + } + request_init["target"] = { + "name": "projects/sample1/locations/sample2/targets/sample3", + "target_id": "target_id_value", + "uid": "uid_value", + 
"description": "description_value", + "annotations": {}, + "labels": {}, + "require_approval": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "gke": { + "cluster": "cluster_value", + "internal_ip": True, + "proxy_url": "proxy_url_value", + }, + "anthos_cluster": {"membership": "membership_value"}, + "run": {"location": "location_value"}, + "multi_target": {"target_ids": ["target_ids_value1", "target_ids_value2"]}, + "custom_target": {"custom_target_type": "custom_target_type_value"}, + "etag": "etag_value", + "execution_configs": [ + { + "usages": [1], + "default_pool": { + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "private_pool": { + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + "verbose": True, + } + ], + "deploy_parameters": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.UpdateTargetRequest.meta.fields["target"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["target"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["target"][field])): + del request_init["target"][field][i][subfield] + else: + del 
request_init["target"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListTargetsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_targets(request) + response = client.update_target(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTargetsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_targets_rest_use_cached_wrapped_rpc(): +def test_update_target_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20363,35 +24138,38 @@ def test_list_targets_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_targets in client._transport._wrapped_methods + assert client._transport.update_target in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.list_targets] = mock_rpc + client._transport._wrapped_methods[client._transport.update_target] = mock_rpc request = {} - client.list_targets(request) + client.update_target(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_targets(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_targets_rest_required_fields( - request_type=cloud_deploy.ListTargetsRequest, +def test_update_target_rest_required_fields( + request_type=cloud_deploy.UpdateTargetRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20402,30 +24180,26 @@ def test_list_targets_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_targets._get_unset_required_fields(jsonified_request) + ).update_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_targets._get_unset_required_fields(jsonified_request) + ).update_target._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "allow_missing", + "request_id", + "update_mask", + "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20434,7 +24208,7 @@ def test_list_targets_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListTargetsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20446,49 +24220,52 @@ def test_list_targets_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_targets(request) + response = client.update_target(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_targets_rest_unset_required_fields(): +def test_update_target_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.list_targets._get_unset_required_fields({}) + unset_fields = transport.update_target._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "target", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_targets_rest_interceptors(null_interceptor): +def test_update_target_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20501,14 +24278,16 @@ def test_list_targets_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_list_targets" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_update_target" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_list_targets" + transports.CloudDeployRestInterceptor, "pre_update_target" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.ListTargetsRequest.pb( - cloud_deploy.ListTargetsRequest() + pb_message = cloud_deploy.UpdateTargetRequest.pb( + cloud_deploy.UpdateTargetRequest() ) transcode.return_value = { "method": "post", @@ -20520,19 +24299,19 @@ def test_list_targets_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.ListTargetsResponse.to_json( - cloud_deploy.ListTargetsResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_deploy.ListTargetsRequest() + request = cloud_deploy.UpdateTargetRequest() metadata = [ ("key", "val"), ("cephalopod", 
"squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.ListTargetsResponse() + post.return_value = operations_pb2.Operation() - client.list_targets( + client.update_target( request, metadata=[ ("key", "val"), @@ -20544,8 +24323,8 @@ def test_list_targets_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_targets_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.ListTargetsRequest +def test_update_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.UpdateTargetRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20553,7 +24332,9 @@ def test_list_targets_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20565,10 +24346,10 @@ def test_list_targets_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_targets(request) + client.update_target(request) -def test_list_targets_rest_flattened(): +def test_update_target_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20577,39 +24358,41 @@ def test_list_targets_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.ListTargetsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + target=cloud_deploy.Target(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_targets(**mock_args) + client.update_target(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/targets" % client.transport._host, + "%s/v1/{target.name=projects/*/locations/*/targets/*}" + % client.transport._host, args[1], ) -def test_list_targets_rest_flattened_error(transport: str = "rest"): +def test_update_target_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20618,113 +24401,55 @@ def test_list_targets_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_targets( - cloud_deploy.ListTargetsRequest(), - parent="parent_value", + client.update_target( + cloud_deploy.UpdateTargetRequest(), + target=cloud_deploy.Target(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_targets_rest_pager(transport: str = "rest"): +def test_update_target_rest_error(): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_deploy.ListTargetsResponse( - targets=[ - cloud_deploy.Target(), - cloud_deploy.Target(), - cloud_deploy.Target(), - ], - next_page_token="abc", - ), - cloud_deploy.ListTargetsResponse( - targets=[], - next_page_token="def", - ), - cloud_deploy.ListTargetsResponse( - targets=[ - cloud_deploy.Target(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListTargetsResponse( - targets=[ - cloud_deploy.Target(), - cloud_deploy.Target(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(cloud_deploy.ListTargetsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_targets(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, 
cloud_deploy.Target) for i in results) - - pages = list(client.list_targets(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - cloud_deploy.RollbackTargetRequest, + cloud_deploy.DeleteTargetRequest, dict, ], ) -def test_rollback_target_rest(request_type): +def test_delete_target_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.RollbackTargetResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.rollback_target(request) + response = client.delete_target(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.RollbackTargetResponse) + assert response.operation.name == "operations/spam" -def test_rollback_target_rest_use_cached_wrapped_rpc(): +def test_delete_target_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20738,37 +24463,39 @@ def test_rollback_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.rollback_target in client._transport._wrapped_methods + assert client._transport.delete_target in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.rollback_target] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_target] = mock_rpc request = {} - client.rollback_target(request) + client.delete_target(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.rollback_target(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_rollback_target_rest_required_fields( - request_type=cloud_deploy.RollbackTargetRequest, +def test_delete_target_rest_required_fields( + request_type=cloud_deploy.DeleteTargetRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} request_init["name"] = "" - request_init["target_id"] = "" - request_init["rollout_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20779,27 +24506,30 @@ def test_rollback_target_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).rollback_target._get_unset_required_fields(jsonified_request) + ).delete_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = "name_value" - jsonified_request["targetId"] = "target_id_value" - jsonified_request["rolloutId"] = "rollout_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).rollback_target._get_unset_required_fields(jsonified_request) + ).delete_target._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "request_id", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" - assert "targetId" in jsonified_request - assert jsonified_request["targetId"] == "target_id_value" - assert "rolloutId" in jsonified_request - assert jsonified_request["rolloutId"] == "rollout_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20808,7 +24538,7 @@ def test_rollback_target_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.RollbackTargetResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20820,49 +24550,46 @@ def test_rollback_target_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.rollback_target(request) + response = client.delete_target(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_rollback_target_rest_unset_required_fields(): +def 
test_delete_target_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.rollback_target._get_unset_required_fields({}) + unset_fields = transport.delete_target._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) - & set( + set( ( - "name", - "targetId", - "rolloutId", + "allowMissing", + "etag", + "requestId", + "validateOnly", ) ) + & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_rollback_target_rest_interceptors(null_interceptor): +def test_delete_target_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20875,14 +24602,16 @@ def test_rollback_target_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_rollback_target" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_target" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_rollback_target" + transports.CloudDeployRestInterceptor, "pre_delete_target" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.RollbackTargetRequest.pb( - cloud_deploy.RollbackTargetRequest() + pb_message = cloud_deploy.DeleteTargetRequest.pb( + cloud_deploy.DeleteTargetRequest() ) transcode.return_value = { "method": "post", @@ -20894,19 +24623,19 @@ def test_rollback_target_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.RollbackTargetResponse.to_json( - cloud_deploy.RollbackTargetResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() 
) - request = cloud_deploy.RollbackTargetRequest() + request = cloud_deploy.DeleteTargetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.RollbackTargetResponse() + post.return_value = operations_pb2.Operation() - client.rollback_target( + client.delete_target( request, metadata=[ ("key", "val"), @@ -20918,8 +24647,8 @@ def test_rollback_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_rollback_target_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.RollbackTargetRequest +def test_delete_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.DeleteTargetRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20927,9 +24656,7 @@ def test_rollback_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20941,10 +24668,10 @@ def test_rollback_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.rollback_target(request) + client.delete_target(request) -def test_rollback_target_rest_flattened(): +def test_delete_target_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20953,44 +24680,37 @@ def test_rollback_target_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.RollbackTargetResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + sample_request = {"name": "projects/sample1/locations/sample2/targets/sample3"} # get truthy value for each flattened field mock_args = dict( name="name_value", - target_id="target_id_value", - rollout_id="rollout_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.rollback_target(**mock_args) + client.delete_target(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}:rollbackTarget" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/targets/*}" % client.transport._host, args[1], ) -def test_rollback_target_rest_flattened_error(transport: str = "rest"): +def test_delete_target_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20999,15 +24719,13 @@ def test_rollback_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.rollback_target( - cloud_deploy.RollbackTargetRequest(), + client.delete_target( + cloud_deploy.DeleteTargetRequest(), name="name_value", - target_id="target_id_value", - rollout_id="rollout_id_value", ) -def test_rollback_target_rest_error(): +def test_delete_target_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21016,54 +24734,46 @@ def test_rollback_target_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetTargetRequest, + cloud_deploy.ListCustomTargetTypesRequest, dict, ], ) -def test_get_target_rest(request_type): +def test_list_custom_target_types_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.Target( - name="name_value", - target_id="target_id_value", - uid="uid_value", - description="description_value", - require_approval=True, - etag="etag_value", + return_value = cloud_deploy.ListCustomTargetTypesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.Target.pb(return_value) + return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_target(request) + response = client.list_custom_target_types(request) # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.Target) - assert response.name == "name_value" - assert response.target_id == "target_id_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.require_approval is True - assert response.etag == "etag_value" + assert isinstance(response, pagers.ListCustomTargetTypesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_target_rest_use_cached_wrapped_rpc(): +def test_list_custom_target_types_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21077,33 +24787,40 @@ def test_get_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_target in client._transport._wrapped_methods + assert ( + client._transport.list_custom_target_types + in 
client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_target] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_custom_target_types + ] = mock_rpc request = {} - client.get_target(request) + client.list_custom_target_types(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_target(request) + client.list_custom_target_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequest): +def test_list_custom_target_types_rest_required_fields( + request_type=cloud_deploy.ListCustomTargetTypesRequest, +): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21114,21 +24831,30 @@ def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_target._get_unset_required_fields(jsonified_request) + ).list_custom_target_types._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_target._get_unset_required_fields(jsonified_request) + ).list_custom_target_types._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are 
not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21137,7 +24863,7 @@ def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequ request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.Target() + return_value = cloud_deploy.ListCustomTargetTypesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21158,30 +24884,40 @@ def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequ response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.Target.pb(return_value) + return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_target(request) + response = client.list_custom_target_types(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_target_rest_unset_required_fields(): +def test_list_custom_target_types_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_target._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & 
set(("name",))) + unset_fields = transport.list_custom_target_types._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_target_rest_interceptors(null_interceptor): +def test_list_custom_target_types_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21194,13 +24930,15 @@ def test_get_target_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_get_target" + transports.CloudDeployRestInterceptor, "post_list_custom_target_types" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_get_target" + transports.CloudDeployRestInterceptor, "pre_list_custom_target_types" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.GetTargetRequest.pb(cloud_deploy.GetTargetRequest()) + pb_message = cloud_deploy.ListCustomTargetTypesRequest.pb( + cloud_deploy.ListCustomTargetTypesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21211,17 +24949,19 @@ def test_get_target_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.Target.to_json(cloud_deploy.Target()) + req.return_value._content = cloud_deploy.ListCustomTargetTypesResponse.to_json( + cloud_deploy.ListCustomTargetTypesResponse() + ) - request = cloud_deploy.GetTargetRequest() + request = cloud_deploy.ListCustomTargetTypesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.Target() + post.return_value = 
cloud_deploy.ListCustomTargetTypesResponse() - client.get_target( + client.list_custom_target_types( request, metadata=[ ("key", "val"), @@ -21233,8 +24973,8 @@ def test_get_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_target_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.GetTargetRequest +def test_list_custom_target_types_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListCustomTargetTypesRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21242,7 +24982,7 @@ def test_get_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21254,10 +24994,10 @@ def test_get_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_target(request) + client.list_custom_target_types(request) -def test_get_target_rest_flattened(): +def test_list_custom_target_types_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21266,14 +25006,14 @@ def test_get_target_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.Target() + return_value = cloud_deploy.ListCustomTargetTypesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/targets/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -21281,24 +25021,25 @@ def test_get_target_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.Target.pb(return_value) + return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_target(**mock_args) + client.list_custom_target_types(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/targets/*}" % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/customTargetTypes" + % client.transport._host, args[1], ) -def test_get_target_rest_flattened_error(transport: str = "rest"): +def test_list_custom_target_types_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21307,162 +25048,126 @@ def test_get_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_target( - cloud_deploy.GetTargetRequest(), - name="name_value", + client.list_custom_target_types( + cloud_deploy.ListCustomTargetTypesRequest(), + parent="parent_value", ) -def test_get_target_rest_error(): +def test_list_custom_target_types_rest_pager(transport: str = "rest"): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListCustomTargetTypesResponse( + custom_target_types=[ + cloud_deploy.CustomTargetType(), + cloud_deploy.CustomTargetType(), + cloud_deploy.CustomTargetType(), + ], + next_page_token="abc", + ), + cloud_deploy.ListCustomTargetTypesResponse( + custom_target_types=[], + next_page_token="def", + ), + cloud_deploy.ListCustomTargetTypesResponse( + custom_target_types=[ + cloud_deploy.CustomTargetType(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListCustomTargetTypesResponse( + custom_target_types=[ + cloud_deploy.CustomTargetType(), + cloud_deploy.CustomTargetType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloud_deploy.ListCustomTargetTypesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = 
client.list_custom_target_types(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.CustomTargetType) for i in results) + + pages = list(client.list_custom_target_types(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateTargetRequest, + cloud_deploy.GetCustomTargetTypeRequest, dict, ], ) -def test_create_target_rest(request_type): +def test_get_custom_target_type_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["target"] = { - "name": "name_value", - "target_id": "target_id_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "require_approval": True, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "gke": { - "cluster": "cluster_value", - "internal_ip": True, - "proxy_url": "proxy_url_value", - }, - "anthos_cluster": {"membership": "membership_value"}, - "run": {"location": "location_value"}, - "multi_target": {"target_ids": ["target_ids_value1", "target_ids_value2"]}, - "custom_target": {"custom_target_type": "custom_target_type_value"}, - "etag": "etag_value", - "execution_configs": [ - { - "usages": [1], - "default_pool": { - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "private_pool": { - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - "execution_timeout": {"seconds": 751, "nanos": 543}, - "verbose": 
True, - } - ], - "deploy_parameters": {}, + request_init = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.CreateTargetRequest.meta.fields["target"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["target"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in 
runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["target"][field])): - del request_init["target"][field][i][subfield] - else: - del request_init["target"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.CustomTargetType( + name="name_value", + custom_target_type_id="custom_target_type_id_value", + uid="uid_value", + description="description_value", + etag="etag_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.CustomTargetType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_target(request) + response = client.get_custom_target_type(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_deploy.CustomTargetType) + assert response.name == "name_value" + assert response.custom_target_type_id == "custom_target_type_id_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.etag == "etag_value" -def test_create_target_rest_use_cached_wrapped_rpc(): +def test_get_custom_target_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21476,40 +25181,40 @@ def test_create_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_target in client._transport._wrapped_methods + assert ( + client._transport.get_custom_target_type + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_target] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_custom_target_type + ] = mock_rpc request = {} - client.create_target(request) + client.get_custom_target_type(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_target(request) + client.get_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_target_rest_required_fields( - request_type=cloud_deploy.CreateTargetRequest, +def test_get_custom_target_type_rest_required_fields( + request_type=cloud_deploy.GetCustomTargetTypeRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" - request_init["target_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21517,38 +25222,24 @@ def test_create_target_rest_required_fields( ) # verify fields with default values are dropped - assert "targetId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_target._get_unset_required_fields(jsonified_request) + ).get_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "targetId" in jsonified_request - assert jsonified_request["targetId"] == request_init["target_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["targetId"] = "target_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_target._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "request_id", - "target_id", - "validate_only", - ) - ) + ).get_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "targetId" in jsonified_request - assert jsonified_request["targetId"] == "target_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21557,7 +25248,7 @@ def test_create_target_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.CustomTargetType() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21569,58 +25260,39 @@ def test_create_target_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.CustomTargetType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_target(request) + response = client.get_custom_target_type(request) - expected_params = [ - ( - "targetId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = 
req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_target_rest_unset_required_fields(): +def test_get_custom_target_type_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_target._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "requestId", - "targetId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "targetId", - "target", - ) - ) - ) + unset_fields = transport.get_custom_target_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_target_rest_interceptors(null_interceptor): +def test_get_custom_target_type_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21633,16 +25305,14 @@ def test_create_target_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_create_target" + transports.CloudDeployRestInterceptor, "post_get_custom_target_type" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_create_target" + transports.CloudDeployRestInterceptor, "pre_get_custom_target_type" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.CreateTargetRequest.pb( - cloud_deploy.CreateTargetRequest() + pb_message = cloud_deploy.GetCustomTargetTypeRequest.pb( + cloud_deploy.GetCustomTargetTypeRequest() ) transcode.return_value = { "method": "post", @@ -21654,19 +25324,19 @@ def test_create_target_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = 
PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = cloud_deploy.CustomTargetType.to_json( + cloud_deploy.CustomTargetType() ) - request = cloud_deploy.CreateTargetRequest() + request = cloud_deploy.GetCustomTargetTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.CustomTargetType() - client.create_target( + client.get_custom_target_type( request, metadata=[ ("key", "val"), @@ -21678,8 +25348,8 @@ def test_create_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_target_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.CreateTargetRequest +def test_get_custom_target_type_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetCustomTargetTypeRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21687,7 +25357,9 @@ def test_create_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21699,10 +25371,10 @@ def test_create_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_target(request) + client.get_custom_target_type(request) -def test_create_target_rest_flattened(): +def test_get_custom_target_type_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21711,39 +25383,42 @@ def test_create_target_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.CustomTargetType() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - target=cloud_deploy.Target(name="name_value"), - target_id="target_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.CustomTargetType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_target(**mock_args) + client.get_custom_target_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/targets" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/customTargetTypes/*}" + % client.transport._host, args[1], ) -def test_create_target_rest_flattened_error(transport: str = "rest"): +def test_get_custom_target_type_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21752,15 +25427,13 @@ def test_create_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_target( - cloud_deploy.CreateTargetRequest(), - parent="parent_value", - target=cloud_deploy.Target(name="name_value"), - target_id="target_id_value", + client.get_custom_target_type( + cloud_deploy.GetCustomTargetTypeRequest(), + name="name_value", ) -def test_create_target_rest_error(): +def test_get_custom_target_type_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21769,67 +25442,60 @@ def test_create_target_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.UpdateTargetRequest, + cloud_deploy.CreateCustomTargetTypeRequest, dict, ], ) -def test_update_target_rest(request_type): +def test_create_custom_target_type_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} - } - request_init["target"] = { - "name": "projects/sample1/locations/sample2/targets/sample3", - "target_id": "target_id_value", + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["custom_target_type"] = { + "name": "name_value", + "custom_target_type_id": "custom_target_type_id_value", "uid": "uid_value", "description": "description_value", "annotations": {}, "labels": {}, - "require_approval": True, "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, - "gke": { - "cluster": "cluster_value", - "internal_ip": True, - "proxy_url": "proxy_url_value", - }, - "anthos_cluster": {"membership": "membership_value"}, - "run": {"location": "location_value"}, - "multi_target": {"target_ids": ["target_ids_value1", "target_ids_value2"]}, - "custom_target": {"custom_target_type": "custom_target_type_value"}, "etag": "etag_value", - "execution_configs": [ - { - "usages": [1], - "default_pool": { - "service_account": 
"service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "private_pool": { - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - "execution_timeout": {"seconds": 751, "nanos": 543}, - "verbose": True, - } - ], - "deploy_parameters": {}, + "custom_actions": { + "render_action": "render_action_value", + "deploy_action": "deploy_action_value", + "include_skaffold_modules": [ + { + "configs": ["configs_value1", "configs_value2"], + "git": { + "repo": "repo_value", + "path": "path_value", + "ref": "ref_value", + }, + "google_cloud_storage": { + "source": "source_value", + "path": "path_value", + }, + "google_cloud_build_repo": { + "repository": "repository_value", + "path": "path_value", + "ref": "ref_value", + }, + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.UpdateTargetRequest.meta.fields["target"] + test_field = cloud_deploy.CreateCustomTargetTypeRequest.meta.fields[ + "custom_target_type" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -21857,7 +25523,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["target"].items(): # pragma: NO COVER + for field, value in request_init["custom_target_type"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -21887,10 +25553,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["target"][field])): - del request_init["target"][field][i][subfield] + for i in range(0, len(request_init["custom_target_type"][field])): + del request_init["custom_target_type"][field][i][subfield] else: - del request_init["target"][field][subfield] + del request_init["custom_target_type"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -21905,13 +25571,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_target(request) + response = client.create_custom_target_type(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_update_target_rest_use_cached_wrapped_rpc(): +def test_create_custom_target_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21925,17 +25591,22 @@ def test_update_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_target in client._transport._wrapped_methods + assert ( + client._transport.create_custom_target_type + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_target] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_custom_target_type + ] = mock_rpc request = {} - client.update_target(request) + client.create_custom_target_type(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -21944,19 +25615,21 @@ def test_update_target_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_target(request) + client.create_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_target_rest_required_fields( - request_type=cloud_deploy.UpdateTargetRequest, +def test_create_custom_target_type_rest_required_fields( + request_type=cloud_deploy.CreateCustomTargetTypeRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} + request_init["parent"] = "" + request_init["custom_target_type_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21964,29 +25637,40 @@ def test_update_target_rest_required_fields( ) # verify fields with default values are dropped + assert "customTargetTypeId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_target._get_unset_required_fields(jsonified_request) + ).create_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "customTargetTypeId" in jsonified_request + assert ( + jsonified_request["customTargetTypeId"] == request_init["custom_target_type_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["customTargetTypeId"] = "custom_target_type_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_target._get_unset_required_fields(jsonified_request) + ).create_custom_target_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", + "custom_target_type_id", "request_id", - "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "customTargetTypeId" in jsonified_request + assert jsonified_request["customTargetTypeId"] == "custom_target_type_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22007,7 +25691,7 @@ def test_update_target_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -22020,39 +25704,45 @@ def test_update_target_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_target(request) + response = client.create_custom_target_type(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "customTargetTypeId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_target_rest_unset_required_fields(): +def test_create_custom_target_type_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_target._get_unset_required_fields({}) + unset_fields = transport.create_custom_target_type._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", + "customTargetTypeId", "requestId", - "updateMask", "validateOnly", ) ) & set( ( - "updateMask", - "target", + "parent", + "customTargetTypeId", + "customTargetType", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_update_target_rest_interceptors(null_interceptor): +def test_create_custom_target_type_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22067,14 +25757,14 @@ def test_update_target_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_update_target" + transports.CloudDeployRestInterceptor, "post_create_custom_target_type" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_update_target" + transports.CloudDeployRestInterceptor, "pre_create_custom_target_type" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.UpdateTargetRequest.pb( - cloud_deploy.UpdateTargetRequest() + pb_message = cloud_deploy.CreateCustomTargetTypeRequest.pb( + cloud_deploy.CreateCustomTargetTypeRequest() ) transcode.return_value = { "method": "post", @@ -22090,7 +25780,7 @@ def test_update_target_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_deploy.UpdateTargetRequest() + request = cloud_deploy.CreateCustomTargetTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22098,7 +25788,7 @@ def test_update_target_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_target( + client.create_custom_target_type( request, metadata=[ ("key", "val"), @@ -22110,8 +25800,8 @@ def test_update_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_target_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.UpdateTargetRequest +def test_create_custom_target_type_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateCustomTargetTypeRequest ): client = CloudDeployClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -22119,9 +25809,7 @@ def test_update_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22133,10 +25821,10 @@ def test_update_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_target(request) + client.create_custom_target_type(request) -def test_update_target_rest_flattened(): +def test_create_custom_target_type_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22148,14 +25836,13 @@ def test_update_target_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - target=cloud_deploy.Target(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), + custom_target_type_id="custom_target_type_id_value", ) mock_args.update(sample_request) @@ -22166,20 +25853,20 @@ def test_update_target_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_target(**mock_args) + client.create_custom_target_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{target.name=projects/*/locations/*/targets/*}" + "%s/v1/{parent=projects/*/locations/*}/customTargetTypes" % client.transport._host, args[1], ) -def test_update_target_rest_flattened_error(transport: str = "rest"): +def test_create_custom_target_type_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22188,14 +25875,15 @@ def test_update_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_target( - cloud_deploy.UpdateTargetRequest(), - target=cloud_deploy.Target(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_custom_target_type( + cloud_deploy.CreateCustomTargetTypeRequest(), + parent="parent_value", + custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), + custom_target_type_id="custom_target_type_id_value", ) -def test_update_target_rest_error(): +def test_create_custom_target_type_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22204,18 +25892,125 @@ def test_update_target_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.DeleteTargetRequest, + cloud_deploy.UpdateCustomTargetTypeRequest, dict, ], ) -def test_delete_target_rest(request_type): +def test_update_custom_target_type_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + request_init = { + "custom_target_type": { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } + } + 
request_init["custom_target_type"] = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3", + "custom_target_type_id": "custom_target_type_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "etag": "etag_value", + "custom_actions": { + "render_action": "render_action_value", + "deploy_action": "deploy_action_value", + "include_skaffold_modules": [ + { + "configs": ["configs_value1", "configs_value2"], + "git": { + "repo": "repo_value", + "path": "path_value", + "ref": "ref_value", + }, + "google_cloud_storage": { + "source": "source_value", + "path": "path_value", + }, + "google_cloud_build_repo": { + "repository": "repository_value", + "path": "path_value", + "ref": "ref_value", + }, + } + ], + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.UpdateCustomTargetTypeRequest.meta.fields[ + "custom_target_type" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["custom_target_type"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["custom_target_type"][field])): + del 
request_init["custom_target_type"][field][i][subfield] + else: + del request_init["custom_target_type"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -22230,13 +26025,13 @@ def test_delete_target_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_target(request) + response = client.update_custom_target_type(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_target_rest_use_cached_wrapped_rpc(): +def test_update_custom_target_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22250,17 +26045,22 @@ def test_delete_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_target in client._transport._wrapped_methods + assert ( + client._transport.update_custom_target_type + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_target] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_custom_target_type + ] = mock_rpc request = {} - client.delete_target(request) + client.update_custom_target_type(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -22269,20 +26069,19 @@ def test_delete_target_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_target(request) + client.update_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_target_rest_required_fields( - request_type=cloud_deploy.DeleteTargetRequest, +def test_update_custom_target_type_rest_required_fields( + request_type=cloud_deploy.UpdateCustomTargetTypeRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22293,30 +26092,26 @@ def test_delete_target_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_target._get_unset_required_fields(jsonified_request) + ).update_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_target._get_unset_required_fields(jsonified_request) + ).update_custom_target_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( "allow_missing", - "etag", "request_id", + "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22337,9 +26132,10 @@ def test_delete_target_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -22349,34 +26145,39 @@ def test_delete_target_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_target(request) + response = client.update_custom_target_type(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_target_rest_unset_required_fields(): +def test_update_custom_target_type_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_target._get_unset_required_fields({}) + unset_fields = transport.update_custom_target_type._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( "allowMissing", - "etag", "requestId", + "updateMask", "validateOnly", ) ) - & set(("name",)) + & set( + ( + "updateMask", + "customTargetType", + ) + ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_target_rest_interceptors(null_interceptor): +def test_update_custom_target_type_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), 
interceptor=None @@ -22391,14 +26192,14 @@ def test_delete_target_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_delete_target" + transports.CloudDeployRestInterceptor, "post_update_custom_target_type" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_delete_target" + transports.CloudDeployRestInterceptor, "pre_update_custom_target_type" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.DeleteTargetRequest.pb( - cloud_deploy.DeleteTargetRequest() + pb_message = cloud_deploy.UpdateCustomTargetTypeRequest.pb( + cloud_deploy.UpdateCustomTargetTypeRequest() ) transcode.return_value = { "method": "post", @@ -22414,7 +26215,7 @@ def test_delete_target_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_deploy.DeleteTargetRequest() + request = cloud_deploy.UpdateCustomTargetTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22422,7 +26223,7 @@ def test_delete_target_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_target( + client.update_custom_target_type( request, metadata=[ ("key", "val"), @@ -22434,8 +26235,8 @@ def test_delete_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_target_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.DeleteTargetRequest +def test_update_custom_target_type_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.UpdateCustomTargetTypeRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22443,7 +26244,11 @@ def test_delete_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + 
request_init = { + "custom_target_type": { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22455,10 +26260,10 @@ def test_delete_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_target(request) + client.update_custom_target_type(request) -def test_delete_target_rest_flattened(): +def test_update_custom_target_type_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22470,11 +26275,16 @@ def test_delete_target_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/targets/sample3"} + sample_request = { + "custom_target_type": { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -22485,19 +26295,20 @@ def test_delete_target_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_target(**mock_args) + client.update_custom_target_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/targets/*}" % client.transport._host, + "%s/v1/{custom_target_type.name=projects/*/locations/*/customTargetTypes/*}" + % client.transport._host, args[1], ) -def test_delete_target_rest_flattened_error(transport: str = "rest"): +def test_update_custom_target_type_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22506,13 +26317,14 @@ def test_delete_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_target( - cloud_deploy.DeleteTargetRequest(), - name="name_value", + client.update_custom_target_type( + cloud_deploy.UpdateCustomTargetTypeRequest(), + custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_delete_target_rest_error(): +def test_update_custom_target_type_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22521,46 +26333,41 @@ def test_delete_target_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListCustomTargetTypesRequest, + cloud_deploy.DeleteCustomTargetTypeRequest, dict, ], ) -def test_list_custom_target_types_rest(request_type): +def test_delete_custom_target_type_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListCustomTargetTypesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_custom_target_types(request) + response = client.delete_custom_target_type(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListCustomTargetTypesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_custom_target_types_rest_use_cached_wrapped_rpc(): +def test_delete_custom_target_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22575,7 +26382,7 @@ def test_list_custom_target_types_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_custom_target_types + client._transport.delete_custom_target_type in client._transport._wrapped_methods ) @@ -22585,29 +26392,33 @@ def test_list_custom_target_types_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.list_custom_target_types + client._transport.delete_custom_target_type ] = mock_rpc request = {} - client.list_custom_target_types(request) + client.delete_custom_target_type(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_custom_target_types(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_custom_target_types_rest_required_fields( - request_type=cloud_deploy.ListCustomTargetTypesRequest, +def test_delete_custom_target_type_rest_required_fields( + request_type=cloud_deploy.DeleteCustomTargetTypeRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22618,30 +26429,30 @@ def test_list_custom_target_types_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_custom_target_types._get_unset_required_fields(jsonified_request) + ).delete_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_custom_target_types._get_unset_required_fields(jsonified_request) + ).delete_custom_target_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "allow_missing", + "etag", + "request_id", + "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22650,7 +26461,7 @@ def test_list_custom_target_types_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListCustomTargetTypesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -22662,49 +26473,46 @@ def test_list_custom_target_types_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_custom_target_types(request) + response = client.delete_custom_target_type(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_custom_target_types_rest_unset_required_fields(): +def 
test_delete_custom_target_type_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_custom_target_types._get_unset_required_fields({}) + unset_fields = transport.delete_custom_target_type._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "allowMissing", + "etag", + "requestId", + "validateOnly", ) ) - & set(("parent",)) + & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_custom_target_types_rest_interceptors(null_interceptor): +def test_delete_custom_target_type_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22717,14 +26525,16 @@ def test_list_custom_target_types_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_list_custom_target_types" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_custom_target_type" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_list_custom_target_types" + transports.CloudDeployRestInterceptor, "pre_delete_custom_target_type" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.ListCustomTargetTypesRequest.pb( - cloud_deploy.ListCustomTargetTypesRequest() + pb_message = cloud_deploy.DeleteCustomTargetTypeRequest.pb( + cloud_deploy.DeleteCustomTargetTypeRequest() ) transcode.return_value = { "method": "post", @@ -22736,19 +26546,19 @@ def test_list_custom_target_types_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = 
cloud_deploy.ListCustomTargetTypesResponse.to_json( - cloud_deploy.ListCustomTargetTypesResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_deploy.ListCustomTargetTypesRequest() + request = cloud_deploy.DeleteCustomTargetTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.ListCustomTargetTypesResponse() + post.return_value = operations_pb2.Operation() - client.list_custom_target_types( + client.delete_custom_target_type( request, metadata=[ ("key", "val"), @@ -22760,8 +26570,8 @@ def test_list_custom_target_types_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_custom_target_types_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.ListCustomTargetTypesRequest +def test_delete_custom_target_type_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.DeleteCustomTargetTypeRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22769,7 +26579,9 @@ def test_list_custom_target_types_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -22781,10 +26593,10 @@ def test_list_custom_target_types_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_custom_target_types(request) + client.delete_custom_target_type(request) -def test_list_custom_target_types_rest_flattened(): +def test_delete_custom_target_type_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22793,40 +26605,40 @@ def test_list_custom_target_types_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListCustomTargetTypesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_custom_target_types(**mock_args) + client.delete_custom_target_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/customTargetTypes" + "%s/v1/{name=projects/*/locations/*/customTargetTypes/*}" % client.transport._host, args[1], ) -def test_list_custom_target_types_rest_flattened_error(transport: str = "rest"): +def test_delete_custom_target_type_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22835,83 +26647,26 @@ def test_list_custom_target_types_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_custom_target_types( - cloud_deploy.ListCustomTargetTypesRequest(), - parent="parent_value", + client.delete_custom_target_type( + cloud_deploy.DeleteCustomTargetTypeRequest(), + name="name_value", ) -def test_list_custom_target_types_rest_pager(transport: str = "rest"): +def test_delete_custom_target_type_rest_error(): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_deploy.ListCustomTargetTypesResponse( - custom_target_types=[ - cloud_deploy.CustomTargetType(), - cloud_deploy.CustomTargetType(), - cloud_deploy.CustomTargetType(), - ], - next_page_token="abc", - ), - cloud_deploy.ListCustomTargetTypesResponse( - custom_target_types=[], - next_page_token="def", - ), - cloud_deploy.ListCustomTargetTypesResponse( - custom_target_types=[ - cloud_deploy.CustomTargetType(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListCustomTargetTypesResponse( - custom_target_types=[ - cloud_deploy.CustomTargetType(), - cloud_deploy.CustomTargetType(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - cloud_deploy.ListCustomTargetTypesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_custom_target_types(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.CustomTargetType) for i in results) - - pages = list(client.list_custom_target_types(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetCustomTargetTypeRequest, + cloud_deploy.ListReleasesRequest, dict, ], ) -def test_get_custom_target_type_rest(request_type): +def test_list_releases_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22919,42 +26674,36 @@ def 
test_get_custom_target_type_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.CustomTargetType( - name="name_value", - custom_target_type_id="custom_target_type_id_value", - uid="uid_value", - description="description_value", - etag="etag_value", + return_value = cloud_deploy.ListReleasesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.CustomTargetType.pb(return_value) + return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_custom_target_type(request) + response = client.list_releases(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.CustomTargetType) - assert response.name == "name_value" - assert response.custom_target_type_id == "custom_target_type_id_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.etag == "etag_value" + assert isinstance(response, pagers.ListReleasesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_custom_target_type_rest_use_cached_wrapped_rpc(): +def test_list_releases_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22968,40 +26717,35 @@ def test_get_custom_target_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_custom_target_type - in client._transport._wrapped_methods - ) + assert client._transport.list_releases in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_custom_target_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_releases] = mock_rpc request = {} - client.get_custom_target_type(request) + client.list_releases(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_custom_target_type(request) + client.list_releases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_custom_target_type_rest_required_fields( - request_type=cloud_deploy.GetCustomTargetTypeRequest, +def test_list_releases_rest_required_fields( + request_type=cloud_deploy.ListReleasesRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23012,21 +26756,30 @@ def test_get_custom_target_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_custom_target_type._get_unset_required_fields(jsonified_request) + ).list_releases._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_custom_target_type._get_unset_required_fields(jsonified_request) + ).list_releases._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23035,7 +26788,7 @@ def test_get_custom_target_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.CustomTargetType() + return_value = cloud_deploy.ListReleasesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23056,30 +26809,40 @@ def test_get_custom_target_type_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.CustomTargetType.pb(return_value) + return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_custom_target_type(request) + response = client.list_releases(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_custom_target_type_rest_unset_required_fields(): +def test_list_releases_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_custom_target_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = 
transport.list_releases._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_custom_target_type_rest_interceptors(null_interceptor): +def test_list_releases_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23092,14 +26855,14 @@ def test_get_custom_target_type_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_get_custom_target_type" + transports.CloudDeployRestInterceptor, "post_list_releases" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_get_custom_target_type" + transports.CloudDeployRestInterceptor, "pre_list_releases" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.GetCustomTargetTypeRequest.pb( - cloud_deploy.GetCustomTargetTypeRequest() + pb_message = cloud_deploy.ListReleasesRequest.pb( + cloud_deploy.ListReleasesRequest() ) transcode.return_value = { "method": "post", @@ -23111,19 +26874,19 @@ def test_get_custom_target_type_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.CustomTargetType.to_json( - cloud_deploy.CustomTargetType() + req.return_value._content = cloud_deploy.ListReleasesResponse.to_json( + cloud_deploy.ListReleasesResponse() ) - request = cloud_deploy.GetCustomTargetTypeRequest() + request = cloud_deploy.ListReleasesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.CustomTargetType() + post.return_value = cloud_deploy.ListReleasesResponse() - 
client.get_custom_target_type( + client.list_releases( request, metadata=[ ("key", "val"), @@ -23135,8 +26898,8 @@ def test_get_custom_target_type_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_custom_target_type_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.GetCustomTargetTypeRequest +def test_list_releases_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListReleasesRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23145,7 +26908,7 @@ def test_get_custom_target_type_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } request = request_type(**request_init) @@ -23158,10 +26921,10 @@ def test_get_custom_target_type_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_custom_target_type(request) + client.list_releases(request) -def test_get_custom_target_type_rest_flattened(): +def test_list_releases_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23170,16 +26933,16 @@ def test_get_custom_target_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.CustomTargetType() + return_value = cloud_deploy.ListReleasesResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -23187,25 +26950,25 @@ def test_get_custom_target_type_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.CustomTargetType.pb(return_value) + return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_custom_target_type(**mock_args) + client.list_releases(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/customTargetTypes/*}" + "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases" % client.transport._host, args[1], ) -def test_get_custom_target_type_rest_flattened_error(transport: str = "rest"): +def test_list_releases_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23214,157 +26977,134 @@ def test_get_custom_target_type_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_custom_target_type( - cloud_deploy.GetCustomTargetTypeRequest(), - name="name_value", + client.list_releases( + cloud_deploy.ListReleasesRequest(), + parent="parent_value", ) -def test_get_custom_target_type_rest_error(): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_deploy.CreateCustomTargetTypeRequest, - dict, - ], -) -def test_create_custom_target_type_rest(request_type): +def test_list_releases_rest_pager(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["custom_target_type"] = { - "name": "name_value", - "custom_target_type_id": "custom_target_type_id_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "etag": "etag_value", - "custom_actions": { - "render_action": "render_action_value", - "deploy_action": "deploy_action_value", - "include_skaffold_modules": [ - { - "configs": ["configs_value1", "configs_value2"], - "git": { - "repo": "repo_value", - "path": "path_value", - "ref": "ref_value", - }, - "google_cloud_storage": { - "source": "source_value", - "path": "path_value", - }, - "google_cloud_build_repo": { - "repository": "repository_value", - "path": "path_value", - "ref": "ref_value", - }, - } - ], - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.CreateCustomTargetTypeRequest.meta.fields[ - "custom_target_type" - ] + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListReleasesResponse( + releases=[ + cloud_deploy.Release(), + cloud_deploy.Release(), + cloud_deploy.Release(), + ], + next_page_token="abc", + ), + cloud_deploy.ListReleasesResponse( + releases=[], + next_page_token="def", + ), + cloud_deploy.ListReleasesResponse( + releases=[ + cloud_deploy.Release(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListReleasesResponse( + releases=[ + cloud_deploy.Release(), + cloud_deploy.Release(), + ], + ), + ) + # Two responses for two calls + response = response + response - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] + # Wrap the values into proper Response objs + response = tuple(cloud_deploy.ListReleasesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + sample_request = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + pager = client.list_releases(request=sample_request) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.Release) for i in results) - subfields_not_in_runtime = [] + pages = list(client.list_releases(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["custom_target_type"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, 
"keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetReleaseRequest, + dict, + ], +) +def test_get_release_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["custom_target_type"][field])): - del request_init["custom_target_type"][field][i][subfield] - else: - del request_init["custom_target_type"][field][subfield] + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Release( + name="name_value", + uid="uid_value", + description="description_value", + abandoned=True, + skaffold_config_uri="skaffold_config_uri_value", + skaffold_config_path="skaffold_config_path_value", + render_state=cloud_deploy.Release.RenderState.SUCCEEDED, + etag="etag_value", + skaffold_version="skaffold_version_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_custom_target_type(request) + response = client.get_release(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_deploy.Release) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.abandoned is True + assert response.skaffold_config_uri == "skaffold_config_uri_value" + assert response.skaffold_config_path == "skaffold_config_path_value" + assert response.render_state == cloud_deploy.Release.RenderState.SUCCEEDED + assert response.etag == "etag_value" + assert response.skaffold_version == "skaffold_version_value" -def test_create_custom_target_type_rest_use_cached_wrapped_rpc(): +def test_get_release_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23378,45 +27118,33 @@ def test_create_custom_target_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - 
assert ( - client._transport.create_custom_target_type - in client._transport._wrapped_methods - ) + assert client._transport.get_release in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_custom_target_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_release] = mock_rpc request = {} - client.create_custom_target_type(request) + client.get_release(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_custom_target_type(request) + client.get_release(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_custom_target_type_rest_required_fields( - request_type=cloud_deploy.CreateCustomTargetTypeRequest, -): +def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRequest): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" - request_init["custom_target_type_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23424,40 +27152,24 @@ def test_create_custom_target_type_rest_required_fields( ) # verify fields with default values are dropped - assert "customTargetTypeId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_custom_target_type._get_unset_required_fields(jsonified_request) + ).get_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # 
verify required fields with default values are now present - assert "customTargetTypeId" in jsonified_request - assert ( - jsonified_request["customTargetTypeId"] == request_init["custom_target_type_id"] - ) - jsonified_request["parent"] = "parent_value" - jsonified_request["customTargetTypeId"] = "custom_target_type_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_custom_target_type._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "custom_target_type_id", - "request_id", - "validate_only", - ) - ) + ).get_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "customTargetTypeId" in jsonified_request - assert jsonified_request["customTargetTypeId"] == "custom_target_type_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23466,7 +27178,7 @@ def test_create_custom_target_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Release() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23478,58 +27190,39 @@ def test_create_custom_target_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_custom_target_type(request) + response = client.get_release(request) - expected_params = [ - ( - "customTargetTypeId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_custom_target_type_rest_unset_required_fields(): +def test_get_release_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_custom_target_type._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "customTargetTypeId", - "requestId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "customTargetTypeId", - "customTargetType", - ) - ) - ) + unset_fields = transport.get_release._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_custom_target_type_rest_interceptors(null_interceptor): +def test_get_release_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23542,17 +27235,13 @@ def test_create_custom_target_type_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_create_custom_target_type" + transports.CloudDeployRestInterceptor, "post_get_release" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_create_custom_target_type" + transports.CloudDeployRestInterceptor, "pre_get_release" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.CreateCustomTargetTypeRequest.pb( - cloud_deploy.CreateCustomTargetTypeRequest() - ) + pb_message = cloud_deploy.GetReleaseRequest.pb(cloud_deploy.GetReleaseRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -23563,19 +27252,17 @@ def test_create_custom_target_type_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = cloud_deploy.Release.to_json(cloud_deploy.Release()) - request = cloud_deploy.CreateCustomTargetTypeRequest() + request = cloud_deploy.GetReleaseRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.Release() - client.create_custom_target_type( + client.get_release( request, metadata=[ ("key", "val"), @@ -23587,8 +27274,8 @@ def test_create_custom_target_type_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_custom_target_type_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.CreateCustomTargetTypeRequest +def 
test_get_release_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetReleaseRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23596,7 +27283,9 @@ def test_create_custom_target_type_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23608,10 +27297,10 @@ def test_create_custom_target_type_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_custom_target_type(request) + client.get_release(request) -def test_create_custom_target_type_rest_flattened(): +def test_get_release_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23620,40 +27309,42 @@ def test_create_custom_target_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Release() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), - custom_target_type_id="custom_target_type_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_custom_target_type(**mock_args) + client.get_release(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/customTargetTypes" + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}" % client.transport._host, args[1], ) -def test_create_custom_target_type_rest_flattened_error(transport: str = "rest"): +def test_get_release_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23662,15 +27353,13 @@ def test_create_custom_target_type_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_custom_target_type( - cloud_deploy.CreateCustomTargetTypeRequest(), - parent="parent_value", - custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), - custom_target_type_id="custom_target_type_id_value", + client.get_release( + cloud_deploy.GetReleaseRequest(), + name="name_value", ) -def test_create_custom_target_type_rest_error(): +def test_get_release_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -23679,11 +27368,11 @@ def test_create_custom_target_type_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.UpdateCustomTargetTypeRequest, + cloud_deploy.CreateReleaseRequest, dict, ], ) -def test_update_custom_target_type_rest(request_type): +def test_create_release_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23691,52 +27380,231 @@ def test_update_custom_target_type_rest(request_type): # send a request that will satisfy transcoding request_init = { - "custom_target_type": { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" - } + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } - request_init["custom_target_type"] = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3", - "custom_target_type_id": "custom_target_type_id_value", + request_init["release"] = { + "name": "name_value", "uid": "uid_value", "description": "description_value", "annotations": {}, "labels": {}, + "abandoned": True, "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, + "render_start_time": {}, + "render_end_time": {}, + "skaffold_config_uri": "skaffold_config_uri_value", + "skaffold_config_path": "skaffold_config_path_value", + "build_artifacts": [{"image": "image_value", "tag": "tag_value"}], + "delivery_pipeline_snapshot": { + "name": "name_value", + "uid": "uid_value", + 
"description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {}, + "update_time": {}, + "serial_pipeline": { + "stages": [ + { + "target_id": "target_id_value", + "profiles": ["profiles_value1", "profiles_value2"], + "strategy": { + "standard": { + "verify": True, + "predeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + "postdeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + }, + "canary": { + "runtime_config": { + "kubernetes": { + "gateway_service_mesh": { + "http_route": "http_route_value", + "service": "service_value", + "deployment": "deployment_value", + "route_update_wait_time": { + "seconds": 751, + "nanos": 543, + }, + "stable_cutback_duration": {}, + "pod_selector_label": "pod_selector_label_value", + }, + "service_networking": { + "service": "service_value", + "deployment": "deployment_value", + "disable_pod_overprovisioning": True, + "pod_selector_label": "pod_selector_label_value", + }, + }, + "cloud_run": { + "automatic_traffic_control": True, + "canary_revision_tags": [ + "canary_revision_tags_value1", + "canary_revision_tags_value2", + ], + "prior_revision_tags": [ + "prior_revision_tags_value1", + "prior_revision_tags_value2", + ], + "stable_revision_tags": [ + "stable_revision_tags_value1", + "stable_revision_tags_value2", + ], + }, + }, + "canary_deployment": { + "percentages": [1170, 1171], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + }, + "custom_canary_deployment": { + "phase_configs": [ + { + "phase_id": "phase_id_value", + "percentage": 1054, + "profiles": [ + "profiles_value1", + "profiles_value2", + ], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + } + ] + }, + }, + }, + "deploy_parameters": [ + {"values": {}, "match_target_labels": {}} + ], + } + ] + }, + "condition": { + "pipeline_ready_condition": {"status": True, "update_time": {}}, + "targets_present_condition": { + "status": True, + "missing_targets": [ + "missing_targets_value1", + 
"missing_targets_value2", + ], + "update_time": {}, + }, + "targets_type_condition": { + "status": True, + "error_details": "error_details_value", + }, + }, + "etag": "etag_value", + "suspended": True, + }, + "target_snapshots": [ + { + "name": "name_value", + "target_id": "target_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "require_approval": True, + "create_time": {}, + "update_time": {}, + "gke": { + "cluster": "cluster_value", + "internal_ip": True, + "proxy_url": "proxy_url_value", + }, + "anthos_cluster": {"membership": "membership_value"}, + "run": {"location": "location_value"}, + "multi_target": { + "target_ids": ["target_ids_value1", "target_ids_value2"] + }, + "custom_target": {"custom_target_type": "custom_target_type_value"}, + "etag": "etag_value", + "execution_configs": [ + { + "usages": [1], + "default_pool": { + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "private_pool": { + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + "execution_timeout": {}, + "verbose": True, + } + ], + "deploy_parameters": {}, + } + ], + "custom_target_type_snapshots": [ + { + "name": "name_value", + "custom_target_type_id": "custom_target_type_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {}, + "update_time": {}, + "etag": "etag_value", + "custom_actions": { + "render_action": "render_action_value", + "deploy_action": "deploy_action_value", + "include_skaffold_modules": [ + { + "configs": ["configs_value1", "configs_value2"], + "git": { + "repo": "repo_value", + "path": "path_value", + "ref": "ref_value", + }, + "google_cloud_storage": { + "source": 
"source_value", + "path": "path_value", + }, + "google_cloud_build_repo": { + "repository": "repository_value", + "path": "path_value", + "ref": "ref_value", + }, + } + ], + }, + } + ], + "render_state": 1, "etag": "etag_value", - "custom_actions": { - "render_action": "render_action_value", - "deploy_action": "deploy_action_value", - "include_skaffold_modules": [ - { - "configs": ["configs_value1", "configs_value2"], - "git": { - "repo": "repo_value", - "path": "path_value", - "ref": "ref_value", - }, - "google_cloud_storage": { - "source": "source_value", - "path": "path_value", - }, - "google_cloud_build_repo": { - "repository": "repository_value", - "path": "path_value", - "ref": "ref_value", - }, - } - ], + "skaffold_version": "skaffold_version_value", + "target_artifacts": {}, + "target_renders": {}, + "condition": { + "release_ready_condition": {"status": True}, + "skaffold_supported_condition": { + "status": True, + "skaffold_support_state": 1, + "maintenance_mode_time": {}, + "support_expiration_time": {}, + }, }, + "deploy_parameters": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.UpdateCustomTargetTypeRequest.meta.fields[ - "custom_target_type" - ] + test_field = cloud_deploy.CreateReleaseRequest.meta.fields["release"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -23764,7 +27632,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["custom_target_type"].items(): # pragma: NO COVER + for field, value in request_init["release"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -23794,10 +27662,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["custom_target_type"][field])): - del request_init["custom_target_type"][field][i][subfield] + for i in range(0, len(request_init["release"][field])): + del request_init["release"][field][i][subfield] else: - del request_init["custom_target_type"][field][subfield] + del request_init["release"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -23812,13 +27680,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_custom_target_type(request) + response = client.create_release(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_update_custom_target_type_rest_use_cached_wrapped_rpc(): +def test_create_release_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23832,22 +27700,17 @@ def test_update_custom_target_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_custom_target_type - in client._transport._wrapped_methods - ) + assert client._transport.create_release in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_custom_target_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_release] = mock_rpc request = {} - client.update_custom_target_type(request) + client.create_release(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -23856,19 +27719,21 @@ def test_update_custom_target_type_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_custom_target_type(request) + client.create_release(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_custom_target_type_rest_required_fields( - request_type=cloud_deploy.UpdateCustomTargetTypeRequest, +def test_create_release_rest_required_fields( + request_type=cloud_deploy.CreateReleaseRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} + request_init["parent"] = "" + request_init["release_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23876,29 +27741,39 @@ def test_update_custom_target_type_rest_required_fields( ) # verify fields with default values are dropped + assert "releaseId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_custom_target_type._get_unset_required_fields(jsonified_request) + ).create_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "releaseId" in jsonified_request + assert jsonified_request["releaseId"] == request_init["release_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["releaseId"] = "release_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_custom_target_type._get_unset_required_fields(jsonified_request) + ).create_release._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", + "override_deploy_policy", + "release_id", "request_id", - "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "releaseId" in jsonified_request + assert jsonified_request["releaseId"] == "release_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23919,7 +27794,7 @@ def test_update_custom_target_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -23932,39 +27807,46 @@ def test_update_custom_target_type_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_custom_target_type(request) + response = client.create_release(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "releaseId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_custom_target_type_rest_unset_required_fields(): +def test_create_release_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_custom_target_type._get_unset_required_fields({}) + unset_fields = transport.create_release._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", + "overrideDeployPolicy", + "releaseId", "requestId", - "updateMask", "validateOnly", ) ) & set( ( - "updateMask", - "customTargetType", + "parent", + "releaseId", + "release", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) 
-def test_update_custom_target_type_rest_interceptors(null_interceptor): +def test_create_release_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23979,14 +27861,14 @@ def test_update_custom_target_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_update_custom_target_type" + transports.CloudDeployRestInterceptor, "post_create_release" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_update_custom_target_type" + transports.CloudDeployRestInterceptor, "pre_create_release" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.UpdateCustomTargetTypeRequest.pb( - cloud_deploy.UpdateCustomTargetTypeRequest() + pb_message = cloud_deploy.CreateReleaseRequest.pb( + cloud_deploy.CreateReleaseRequest() ) transcode.return_value = { "method": "post", @@ -24002,7 +27884,7 @@ def test_update_custom_target_type_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_deploy.UpdateCustomTargetTypeRequest() + request = cloud_deploy.CreateReleaseRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -24010,7 +27892,7 @@ def test_update_custom_target_type_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_custom_target_type( + client.create_release( request, metadata=[ ("key", "val"), @@ -24022,8 +27904,8 @@ def test_update_custom_target_type_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_custom_target_type_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.UpdateCustomTargetTypeRequest +def test_create_release_rest_bad_request( + transport: str = "rest", 
request_type=cloud_deploy.CreateReleaseRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24032,9 +27914,7 @@ def test_update_custom_target_type_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "custom_target_type": { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" - } + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } request = request_type(**request_init) @@ -24047,10 +27927,10 @@ def test_update_custom_target_type_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_custom_target_type(request) + client.create_release(request) -def test_update_custom_target_type_rest_flattened(): +def test_create_release_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24063,15 +27943,14 @@ def test_update_custom_target_type_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "custom_target_type": { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" - } + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } # get truthy value for each flattened field mock_args = dict( - custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + release=cloud_deploy.Release(name="name_value"), + release_id="release_id_value", ) mock_args.update(sample_request) @@ -24082,20 +27961,20 @@ def test_update_custom_target_type_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_custom_target_type(**mock_args) + client.create_release(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{custom_target_type.name=projects/*/locations/*/customTargetTypes/*}" + "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases" % client.transport._host, args[1], ) -def test_update_custom_target_type_rest_flattened_error(transport: str = "rest"): +def test_create_release_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24104,14 +27983,15 @@ def test_update_custom_target_type_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_custom_target_type( - cloud_deploy.UpdateCustomTargetTypeRequest(), - custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_release( + cloud_deploy.CreateReleaseRequest(), + parent="parent_value", + release=cloud_deploy.Release(name="name_value"), + release_id="release_id_value", ) -def test_update_custom_target_type_rest_error(): +def test_create_release_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24120,11 +28000,11 @@ def test_update_custom_target_type_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.DeleteCustomTargetTypeRequest, + cloud_deploy.AbandonReleaseRequest, dict, ], ) -def test_delete_custom_target_type_rest(request_type): +def test_abandon_release_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24132,29 +28012,31 @@ def test_delete_custom_target_type_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "name": 
"projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.AbandonReleaseResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_custom_target_type(request) + response = client.abandon_release(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_deploy.AbandonReleaseResponse) -def test_delete_custom_target_type_rest_use_cached_wrapped_rpc(): +def test_abandon_release_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24168,39 +28050,30 @@ def test_delete_custom_target_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_custom_target_type - in client._transport._wrapped_methods - ) + assert client._transport.abandon_release in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_custom_target_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.abandon_release] = mock_rpc request = {} - client.delete_custom_target_type(request) + client.abandon_release(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_custom_target_type(request) + client.abandon_release(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_custom_target_type_rest_required_fields( - request_type=cloud_deploy.DeleteCustomTargetTypeRequest, +def test_abandon_release_rest_required_fields( + request_type=cloud_deploy.AbandonReleaseRequest, ): transport_class = transports.CloudDeployRestTransport @@ -24216,7 +28089,7 @@ def test_delete_custom_target_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_custom_target_type._get_unset_required_fields(jsonified_request) + ).abandon_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -24225,16 +28098,7 @@ def test_delete_custom_target_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_custom_target_type._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "allow_missing", - "etag", - "request_id", - "validate_only", - ) - ) + ).abandon_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -24248,7 +28112,7 @@ def test_delete_custom_target_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.AbandonReleaseResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24260,46 +28124,40 @@ def test_delete_custom_target_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_custom_target_type(request) + response = client.abandon_release(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_custom_target_type_rest_unset_required_fields(): +def test_abandon_release_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_custom_target_type._get_unset_required_fields({}) - assert set(unset_fields) == ( - 
set( - ( - "allowMissing", - "etag", - "requestId", - "validateOnly", - ) - ) - & set(("name",)) - ) + unset_fields = transport.abandon_release._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_custom_target_type_rest_interceptors(null_interceptor): +def test_abandon_release_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24312,16 +28170,14 @@ def test_delete_custom_target_type_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_delete_custom_target_type" + transports.CloudDeployRestInterceptor, "post_abandon_release" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_delete_custom_target_type" + transports.CloudDeployRestInterceptor, "pre_abandon_release" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.DeleteCustomTargetTypeRequest.pb( - cloud_deploy.DeleteCustomTargetTypeRequest() + pb_message = cloud_deploy.AbandonReleaseRequest.pb( + cloud_deploy.AbandonReleaseRequest() ) transcode.return_value = { "method": "post", @@ -24333,19 +28189,19 @@ def test_delete_custom_target_type_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = cloud_deploy.AbandonReleaseResponse.to_json( + cloud_deploy.AbandonReleaseResponse() ) - request = cloud_deploy.DeleteCustomTargetTypeRequest() + request = cloud_deploy.AbandonReleaseRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] 
pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.AbandonReleaseResponse() - client.delete_custom_target_type( + client.abandon_release( request, metadata=[ ("key", "val"), @@ -24357,8 +28213,8 @@ def test_delete_custom_target_type_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_custom_target_type_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.DeleteCustomTargetTypeRequest +def test_abandon_release_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.AbandonReleaseRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24367,7 +28223,7 @@ def test_delete_custom_target_type_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" } request = request_type(**request_init) @@ -24380,10 +28236,10 @@ def test_delete_custom_target_type_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_custom_target_type(request) + client.abandon_release(request) -def test_delete_custom_target_type_rest_flattened(): +def test_abandon_release_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24392,11 +28248,11 @@ def test_delete_custom_target_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.AbandonReleaseResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" } # get truthy value for each flattened field @@ -24408,24 +28264,26 @@ def test_delete_custom_target_type_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_custom_target_type(**mock_args) + client.abandon_release(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/customTargetTypes/*}" + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}:abandon" % client.transport._host, args[1], ) -def test_delete_custom_target_type_rest_flattened_error(transport: str = "rest"): +def test_abandon_release_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24434,13 +28292,13 @@ def test_delete_custom_target_type_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_custom_target_type( - cloud_deploy.DeleteCustomTargetTypeRequest(), + client.abandon_release( + cloud_deploy.AbandonReleaseRequest(), name="name_value", ) -def test_delete_custom_target_type_rest_error(): +def test_abandon_release_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24449,48 +28307,151 @@ def test_delete_custom_target_type_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListReleasesRequest, + cloud_deploy.CreateDeployPolicyRequest, dict, ], ) -def test_list_releases_rest(request_type): +def test_create_deploy_policy_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["deploy_policy"] = { + "name": "name_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "suspended": True, + "selectors": [ + { + "delivery_pipeline": {"id": "id_value", "labels": {}}, + "target": {"id": "id_value", "labels": {}}, + } + ], + "rules": [ + { + "rollout_restriction": { + "id": "id_value", + "invokers": [1], + "actions": [1], + "time_windows": { + "time_zone": "time_zone_value", + "one_time_windows": [ + { + "start_date": {"year": 433, "month": 550, "day": 318}, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "end_date": {}, + "end_time": {}, + } + ], + "weekly_windows": [ + {"days_of_week": [1], "start_time": {}, "end_time": {}} + ], + }, + } + } + ], + "etag": "etag_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.CreateDeployPolicyRequest.meta.fields["deploy_policy"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deploy_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the 
dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deploy_policy"][field])): + del request_init["deploy_policy"][field][i][subfield] + else: + del request_init["deploy_policy"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListReleasesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_releases(request) + response = client.create_deploy_policy(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListReleasesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_releases_rest_use_cached_wrapped_rpc(): +def test_create_deploy_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24504,35 +28465,44 @@ def test_list_releases_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_releases in client._transport._wrapped_methods + assert ( + client._transport.create_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_releases] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_deploy_policy + ] = mock_rpc request = {} - client.list_releases(request) + client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_releases(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_releases_rest_required_fields( - request_type=cloud_deploy.ListReleasesRequest, +def test_create_deploy_policy_rest_required_fields( + request_type=cloud_deploy.CreateDeployPolicyRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} request_init["parent"] = "" + request_init["deploy_policy_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24540,26 +28510,29 @@ def test_list_releases_rest_required_fields( ) # verify fields with default values are dropped + assert "deployPolicyId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_releases._get_unset_required_fields(jsonified_request) + ).create_deploy_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "deployPolicyId" in jsonified_request + assert jsonified_request["deployPolicyId"] == request_init["deploy_policy_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["deployPolicyId"] = "deploy_policy_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_releases._get_unset_required_fields(jsonified_request) + ).create_deploy_policy._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "deploy_policy_id", + "request_id", + "validate_only", ) ) jsonified_request.update(unset_fields) @@ -24567,6 +28540,8 @@ def test_list_releases_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "deployPolicyId" in jsonified_request + assert jsonified_request["deployPolicyId"] == "deploy_policy_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24575,7 +28550,7 @@ def test_list_releases_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListReleasesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24587,49 +28562,58 @@ def test_list_releases_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_releases(request) + response = client.create_deploy_policy(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "deployPolicyId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = 
req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_releases_rest_unset_required_fields(): +def test_create_deploy_policy_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_releases._get_unset_required_fields({}) + unset_fields = transport.create_deploy_policy._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "deployPolicyId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "deployPolicyId", + "deployPolicy", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_releases_rest_interceptors(null_interceptor): +def test_create_deploy_policy_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24642,14 +28626,16 @@ def test_list_releases_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_list_releases" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_create_deploy_policy" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_list_releases" + transports.CloudDeployRestInterceptor, "pre_create_deploy_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.ListReleasesRequest.pb( - cloud_deploy.ListReleasesRequest() + pb_message = cloud_deploy.CreateDeployPolicyRequest.pb( + cloud_deploy.CreateDeployPolicyRequest() ) transcode.return_value = { "method": "post", @@ -24661,19 +28647,19 @@ def test_list_releases_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request 
= PreparedRequest() - req.return_value._content = cloud_deploy.ListReleasesResponse.to_json( - cloud_deploy.ListReleasesResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_deploy.ListReleasesRequest() + request = cloud_deploy.CreateDeployPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.ListReleasesResponse() + post.return_value = operations_pb2.Operation() - client.list_releases( + client.create_deploy_policy( request, metadata=[ ("key", "val"), @@ -24685,8 +28671,8 @@ def test_list_releases_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_releases_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.ListReleasesRequest +def test_create_deploy_policy_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateDeployPolicyRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24694,9 +28680,7 @@ def test_list_releases_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -24708,10 +28692,10 @@ def test_list_releases_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_releases(request) + client.create_deploy_policy(request) -def test_list_releases_rest_flattened(): +def test_create_deploy_policy_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24720,42 +28704,40 @@ def test_list_releases_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListReleasesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_releases(**mock_args) + client.create_deploy_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases" + "%s/v1/{parent=projects/*/locations/*}/deployPolicies" % client.transport._host, args[1], ) -def test_list_releases_rest_flattened_error(transport: str = "rest"): +def test_create_deploy_policy_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24764,134 +28746,172 @@ def test_list_releases_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_releases( - cloud_deploy.ListReleasesRequest(), + client.create_deploy_policy( + cloud_deploy.CreateDeployPolicyRequest(), parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) -def test_list_releases_rest_pager(transport: str = "rest"): +def test_create_deploy_policy_rest_error(): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_deploy.ListReleasesResponse( - releases=[ - cloud_deploy.Release(), - cloud_deploy.Release(), - cloud_deploy.Release(), - ], - next_page_token="abc", - ), - cloud_deploy.ListReleasesResponse( - releases=[], - next_page_token="def", - ), - cloud_deploy.ListReleasesResponse( - releases=[ - cloud_deploy.Release(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListReleasesResponse( - releases=[ - cloud_deploy.Release(), - cloud_deploy.Release(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(cloud_deploy.ListReleasesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - - pager = client.list_releases(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.Release) for i in results) - - pages = list(client.list_releases(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetReleaseRequest, + cloud_deploy.UpdateDeployPolicyRequest, dict, ], ) -def test_get_release_rest(request_type): +def test_update_deploy_policy_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" - } + # send a request that 
will satisfy transcoding + request_init = { + "deploy_policy": { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" + } + } + request_init["deploy_policy"] = { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "suspended": True, + "selectors": [ + { + "delivery_pipeline": {"id": "id_value", "labels": {}}, + "target": {"id": "id_value", "labels": {}}, + } + ], + "rules": [ + { + "rollout_restriction": { + "id": "id_value", + "invokers": [1], + "actions": [1], + "time_windows": { + "time_zone": "time_zone_value", + "one_time_windows": [ + { + "start_date": {"year": 433, "month": 550, "day": 318}, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "end_date": {}, + "end_time": {}, + } + ], + "weekly_windows": [ + {"days_of_week": [1], "start_time": {}, "end_time": {}} + ], + }, + } + } + ], + "etag": "etag_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.UpdateDeployPolicyRequest.meta.fields["deploy_policy"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deploy_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deploy_policy"][field])): + del request_init["deploy_policy"][field][i][subfield] + else: + del 
request_init["deploy_policy"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.Release( - name="name_value", - uid="uid_value", - description="description_value", - abandoned=True, - skaffold_config_uri="skaffold_config_uri_value", - skaffold_config_path="skaffold_config_path_value", - render_state=cloud_deploy.Release.RenderState.SUCCEEDED, - etag="etag_value", - skaffold_version="skaffold_version_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_release(request) + response = client.update_deploy_policy(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.Release) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.abandoned is True - assert response.skaffold_config_uri == "skaffold_config_uri_value" - assert response.skaffold_config_path == "skaffold_config_path_value" - assert response.render_state == cloud_deploy.Release.RenderState.SUCCEEDED - assert response.etag == "etag_value" - assert response.skaffold_version == "skaffold_version_value" + assert response.operation.name == "operations/spam" -def test_get_release_rest_use_cached_wrapped_rpc(): +def test_update_deploy_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24905,33 +28925,42 @@ def test_get_release_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_release in client._transport._wrapped_methods + assert ( + client._transport.update_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_release] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_deploy_policy + ] = mock_rpc request = {} - client.get_release(request) + client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_release(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRequest): +def test_update_deploy_policy_rest_required_fields( + request_type=cloud_deploy.UpdateDeployPolicyRequest, +): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24942,21 +28971,26 @@ def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_release._get_unset_required_fields(jsonified_request) + ).update_deploy_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_release._get_unset_required_fields(jsonified_request) + ).update_deploy_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24965,7 +28999,7 @@ def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.Release() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24977,39 +29011,52 @@ def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_release(request) + response = client.update_deploy_policy(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_release_rest_unset_required_fields(): +def test_update_deploy_policy_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - 
unset_fields = transport.get_release._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_deploy_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "deployPolicy", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_release_rest_interceptors(null_interceptor): +def test_update_deploy_policy_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25022,13 +29069,17 @@ def test_get_release_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_get_release" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_update_deploy_policy" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_get_release" + transports.CloudDeployRestInterceptor, "pre_update_deploy_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.GetReleaseRequest.pb(cloud_deploy.GetReleaseRequest()) + pb_message = cloud_deploy.UpdateDeployPolicyRequest.pb( + cloud_deploy.UpdateDeployPolicyRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -25039,17 +29090,19 @@ def test_get_release_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.Release.to_json(cloud_deploy.Release()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = cloud_deploy.GetReleaseRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() 
metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.Release() + post.return_value = operations_pb2.Operation() - client.get_release( + client.update_deploy_policy( request, metadata=[ ("key", "val"), @@ -25061,8 +29114,8 @@ def test_get_release_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_release_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.GetReleaseRequest +def test_update_deploy_policy_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.UpdateDeployPolicyRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25071,7 +29124,9 @@ def test_get_release_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + "deploy_policy": { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" + } } request = request_type(**request_init) @@ -25084,10 +29139,10 @@ def test_get_release_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_release(request) + client.update_deploy_policy(request) -def test_get_release_rest_flattened(): +def test_update_deploy_policy_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25096,42 +29151,43 @@ def test_get_release_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.Release() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + "deploy_policy": { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_release(**mock_args) + client.update_deploy_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}" + "%s/v1/{deploy_policy.name=projects/*/locations/*/deployPolicies/*}" % client.transport._host, args[1], ) -def test_get_release_rest_flattened_error(transport: str = "rest"): +def test_update_deploy_policy_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25140,13 +29196,14 @@ def test_get_release_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_release( - cloud_deploy.GetReleaseRequest(), - name="name_value", + client.update_deploy_policy( + cloud_deploy.UpdateDeployPolicyRequest(), + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_release_rest_error(): +def test_update_deploy_policy_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -25155,325 +29212,370 @@ def test_get_release_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateReleaseRequest, + cloud_deploy.DeleteDeployPolicyRequest, dict, ], ) -def test_create_release_rest(request_type): +def test_delete_deploy_policy_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - request_init["release"] = { - "name": "name_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "abandoned": True, - "create_time": {"seconds": 751, "nanos": 543}, - "render_start_time": {}, - "render_end_time": {}, - "skaffold_config_uri": "skaffold_config_uri_value", - "skaffold_config_path": "skaffold_config_path_value", - "build_artifacts": [{"image": "image_value", "tag": "tag_value"}], - "delivery_pipeline_snapshot": { - "name": "name_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {}, - "update_time": {}, - "serial_pipeline": { - "stages": [ - { - "target_id": "target_id_value", - "profiles": ["profiles_value1", "profiles_value2"], - "strategy": { - "standard": { - "verify": True, - "predeploy": { - "actions": ["actions_value1", "actions_value2"] - }, - "postdeploy": { - "actions": ["actions_value1", "actions_value2"] - 
}, - }, - "canary": { - "runtime_config": { - "kubernetes": { - "gateway_service_mesh": { - "http_route": "http_route_value", - "service": "service_value", - "deployment": "deployment_value", - "route_update_wait_time": { - "seconds": 751, - "nanos": 543, - }, - "stable_cutback_duration": {}, - "pod_selector_label": "pod_selector_label_value", - }, - "service_networking": { - "service": "service_value", - "deployment": "deployment_value", - "disable_pod_overprovisioning": True, - "pod_selector_label": "pod_selector_label_value", - }, - }, - "cloud_run": { - "automatic_traffic_control": True, - "canary_revision_tags": [ - "canary_revision_tags_value1", - "canary_revision_tags_value2", - ], - "prior_revision_tags": [ - "prior_revision_tags_value1", - "prior_revision_tags_value2", - ], - "stable_revision_tags": [ - "stable_revision_tags_value1", - "stable_revision_tags_value2", - ], - }, - }, - "canary_deployment": { - "percentages": [1170, 1171], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - }, - "custom_canary_deployment": { - "phase_configs": [ - { - "phase_id": "phase_id_value", - "percentage": 1054, - "profiles": [ - "profiles_value1", - "profiles_value2", - ], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - } - ] - }, - }, - }, - "deploy_parameters": [ - {"values": {}, "match_target_labels": {}} - ], - } - ] - }, - "condition": { - "pipeline_ready_condition": {"status": True, "update_time": {}}, - "targets_present_condition": { - "status": True, - "missing_targets": [ - "missing_targets_value1", - "missing_targets_value2", - ], - "update_time": {}, - }, - "targets_type_condition": { - "status": True, - "error_details": "error_details_value", - }, - }, - "etag": "etag_value", - "suspended": True, - }, - "target_snapshots": [ - { - "name": "name_value", - "target_id": "target_id_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "require_approval": True, - "create_time": {}, - 
"update_time": {}, - "gke": { - "cluster": "cluster_value", - "internal_ip": True, - "proxy_url": "proxy_url_value", - }, - "anthos_cluster": {"membership": "membership_value"}, - "run": {"location": "location_value"}, - "multi_target": { - "target_ids": ["target_ids_value1", "target_ids_value2"] - }, - "custom_target": {"custom_target_type": "custom_target_type_value"}, - "etag": "etag_value", - "execution_configs": [ - { - "usages": [1], - "default_pool": { - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "private_pool": { - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - "execution_timeout": {}, - "verbose": True, - } - ], - "deploy_parameters": {}, - } - ], - "custom_target_type_snapshots": [ - { - "name": "name_value", - "custom_target_type_id": "custom_target_type_id_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {}, - "update_time": {}, - "etag": "etag_value", - "custom_actions": { - "render_action": "render_action_value", - "deploy_action": "deploy_action_value", - "include_skaffold_modules": [ - { - "configs": ["configs_value1", "configs_value2"], - "git": { - "repo": "repo_value", - "path": "path_value", - "ref": "ref_value", - }, - "google_cloud_storage": { - "source": "source_value", - "path": "path_value", - }, - "google_cloud_build_repo": { - "repository": "repository_value", - "path": "path_value", - "ref": "ref_value", - }, - } - ], - }, + request_init = {"name": "projects/sample1/locations/sample2/deployPolicies/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_deploy_policy(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_deploy_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_deploy_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_deploy_policy + ] = mock_rpc + + request = {} + client.delete_deploy_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_deploy_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_deploy_policy_rest_required_fields( + request_type=cloud_deploy.DeleteDeployPolicyRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deploy_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deploy_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } - ], - "render_state": 1, - "etag": "etag_value", - "skaffold_version": "skaffold_version_value", - "target_artifacts": {}, - "target_renders": {}, - "condition": { - "release_ready_condition": {"status": True}, - "skaffold_supported_condition": { - "status": True, - "skaffold_support_state": 1, - "maintenance_mode_time": {}, - "support_expiration_time": {}, - }, - }, - "deploy_parameters": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + transcode.return_value = transcode_result - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.CreateReleaseRequest.meta.fields["release"] + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_deploy_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_deploy_policy_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_deploy_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "etag", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_deploy_policy_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_deploy_policy" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_delete_deploy_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.DeleteDeployPolicyRequest.pb( + cloud_deploy.DeleteDeployPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + request = cloud_deploy.DeleteDeployPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + 
pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - subfields_not_in_runtime = [] + client.delete_deploy_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["release"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + pre.assert_called_once() + post.assert_called_once() - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["release"][field])): - del request_init["release"][field][i][subfield] - else: - del request_init["release"][field][subfield] +def test_delete_deploy_policy_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.DeleteDeployPolicyRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = 
{"name": "projects/sample1/locations/sample2/deployPolicies/sample3"} request = request_type(**request_init) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_deploy_policy(request) + + +def test_delete_deploy_policy_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.Operation(name="operations/spam") + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_deploy_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_deploy_policy_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_deploy_policy( + cloud_deploy.DeleteDeployPolicyRequest(), + name="name_value", + ) + + +def test_delete_deploy_policy_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListDeployPoliciesRequest, + dict, + ], +) +def test_list_deploy_policies_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_deploy.ListDeployPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeployPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_release(request) + response = client.list_deploy_policies(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListDeployPoliciesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_create_release_rest_use_cached_wrapped_rpc(): +def test_list_deploy_policies_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25487,40 +29589,39 @@ def test_create_release_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_release in client._transport._wrapped_methods + assert ( + client._transport.list_deploy_policies in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.create_release] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_deploy_policies + ] = mock_rpc request = {} - client.create_release(request) + client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_release(request) + client.list_deploy_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_release_rest_required_fields( - request_type=cloud_deploy.CreateReleaseRequest, +def test_list_deploy_policies_rest_required_fields( + request_type=cloud_deploy.ListDeployPoliciesRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} request_init["parent"] = "" - request_init["release_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -25528,29 +29629,26 @@ def test_create_release_rest_required_fields( ) # verify fields with default values are dropped - assert "releaseId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_release._get_unset_required_fields(jsonified_request) + ).list_deploy_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "releaseId" in jsonified_request - assert jsonified_request["releaseId"] == request_init["release_id"] jsonified_request["parent"] = "parent_value" - jsonified_request["releaseId"] = "release_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_release._get_unset_required_fields(jsonified_request) + 
).list_deploy_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "release_id", - "request_id", - "validate_only", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) @@ -25558,8 +29656,6 @@ def test_create_release_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "releaseId" in jsonified_request - assert jsonified_request["releaseId"] == "release_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25568,7 +29664,7 @@ def test_create_release_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.ListDeployPoliciesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -25580,58 +29676,49 @@ def test_create_release_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeployPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_release(request) + response = client.list_deploy_policies(request) - expected_params = [ - ( - "releaseId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_release_rest_unset_required_fields(): +def test_list_deploy_policies_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_release._get_unset_required_fields({}) + unset_fields = transport.list_deploy_policies._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "releaseId", - "requestId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "releaseId", - "release", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_release_rest_interceptors(null_interceptor): +def test_list_deploy_policies_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ 
-25644,16 +29731,14 @@ def test_create_release_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_create_release" + transports.CloudDeployRestInterceptor, "post_list_deploy_policies" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_create_release" + transports.CloudDeployRestInterceptor, "pre_list_deploy_policies" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.CreateReleaseRequest.pb( - cloud_deploy.CreateReleaseRequest() + pb_message = cloud_deploy.ListDeployPoliciesRequest.pb( + cloud_deploy.ListDeployPoliciesRequest() ) transcode.return_value = { "method": "post", @@ -25665,19 +29750,19 @@ def test_create_release_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = cloud_deploy.ListDeployPoliciesResponse.to_json( + cloud_deploy.ListDeployPoliciesResponse() ) - request = cloud_deploy.CreateReleaseRequest() + request = cloud_deploy.ListDeployPoliciesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.ListDeployPoliciesResponse() - client.create_release( + client.list_deploy_policies( request, metadata=[ ("key", "val"), @@ -25689,8 +29774,8 @@ def test_create_release_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_release_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.CreateReleaseRequest +def test_list_deploy_policies_rest_bad_request( + transport: str = "rest", 
request_type=cloud_deploy.ListDeployPoliciesRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25698,9 +29783,7 @@ def test_create_release_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -25712,10 +29795,10 @@ def test_create_release_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_release(request) + client.list_deploy_policies(request) -def test_create_release_rest_flattened(): +def test_list_deploy_policies_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25724,42 +29807,40 @@ def test_create_release_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.ListDeployPoliciesResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", - release=cloud_deploy.Release(name="name_value"), - release_id="release_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeployPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_release(**mock_args) + client.list_deploy_policies(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases" + "%s/v1/{parent=projects/*/locations/*}/deployPolicies" % client.transport._host, args[1], ) -def test_create_release_rest_flattened_error(transport: str = "rest"): +def test_list_deploy_policies_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25768,60 +29849,124 @@ def test_create_release_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_release( - cloud_deploy.CreateReleaseRequest(), + client.list_deploy_policies( + cloud_deploy.ListDeployPoliciesRequest(), parent="parent_value", - release=cloud_deploy.Release(name="name_value"), - release_id="release_id_value", ) -def test_create_release_rest_error(): +def test_list_deploy_policies_rest_pager(transport: str = "rest"): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + ], + next_page_token="abc", + ), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], + next_page_token="def", + ), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloud_deploy.ListDeployPoliciesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = 
client.list_deploy_policies(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.DeployPolicy) for i in results) + + pages = list(client.list_deploy_policies(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - cloud_deploy.AbandonReleaseRequest, + cloud_deploy.GetDeployPolicyRequest, dict, ], ) -def test_abandon_release_rest(request_type): +def test_get_deploy_policy_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/deployPolicies/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.AbandonReleaseResponse() + return_value = cloud_deploy.DeployPolicy( + name="name_value", + uid="uid_value", + description="description_value", + suspended=True, + etag="etag_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) + return_value = cloud_deploy.DeployPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.abandon_release(request) + response = client.get_deploy_policy(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.AbandonReleaseResponse) + assert isinstance(response, cloud_deploy.DeployPolicy) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.suspended is True + assert response.etag == "etag_value" -def test_abandon_release_rest_use_cached_wrapped_rpc(): +def test_get_deploy_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25835,30 +29980,32 @@ def test_abandon_release_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.abandon_release in client._transport._wrapped_methods + assert client._transport.get_deploy_policy in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.abandon_release] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_deploy_policy + ] = mock_rpc request = {} - client.abandon_release(request) + client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.abandon_release(request) + client.get_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_abandon_release_rest_required_fields( - request_type=cloud_deploy.AbandonReleaseRequest, +def test_get_deploy_policy_rest_required_fields( + request_type=cloud_deploy.GetDeployPolicyRequest, ): transport_class = transports.CloudDeployRestTransport @@ -25874,7 +30021,7 @@ def test_abandon_release_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).abandon_release._get_unset_required_fields(jsonified_request) + ).get_deploy_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -25883,7 +30030,7 @@ def test_abandon_release_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).abandon_release._get_unset_required_fields(jsonified_request) + ).get_deploy_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -25897,7 +30044,7 @@ def test_abandon_release_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.AbandonReleaseResponse() + return_value = cloud_deploy.DeployPolicy() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -25909,40 +30056,39 @@ def test_abandon_release_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) + return_value = cloud_deploy.DeployPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.abandon_release(request) + response = client.get_deploy_policy(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_abandon_release_rest_unset_required_fields(): +def test_get_deploy_policy_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.abandon_release._get_unset_required_fields({}) + unset_fields = transport.get_deploy_policy._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_abandon_release_rest_interceptors(null_interceptor): +def test_get_deploy_policy_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25955,14 +30101,14 @@ def test_abandon_release_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, 
"post_abandon_release" + transports.CloudDeployRestInterceptor, "post_get_deploy_policy" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_abandon_release" + transports.CloudDeployRestInterceptor, "pre_get_deploy_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.AbandonReleaseRequest.pb( - cloud_deploy.AbandonReleaseRequest() + pb_message = cloud_deploy.GetDeployPolicyRequest.pb( + cloud_deploy.GetDeployPolicyRequest() ) transcode.return_value = { "method": "post", @@ -25974,19 +30120,19 @@ def test_abandon_release_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.AbandonReleaseResponse.to_json( - cloud_deploy.AbandonReleaseResponse() + req.return_value._content = cloud_deploy.DeployPolicy.to_json( + cloud_deploy.DeployPolicy() ) - request = cloud_deploy.AbandonReleaseRequest() + request = cloud_deploy.GetDeployPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.AbandonReleaseResponse() + post.return_value = cloud_deploy.DeployPolicy() - client.abandon_release( + client.get_deploy_policy( request, metadata=[ ("key", "val"), @@ -25998,8 +30144,8 @@ def test_abandon_release_rest_interceptors(null_interceptor): post.assert_called_once() -def test_abandon_release_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.AbandonReleaseRequest +def test_get_deploy_policy_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetDeployPolicyRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -26007,9 +30153,7 @@ def test_abandon_release_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": 
"projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/deployPolicies/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -26021,10 +30165,10 @@ def test_abandon_release_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.abandon_release(request) + client.get_deploy_policy(request) -def test_abandon_release_rest_flattened(): +def test_get_deploy_policy_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26033,11 +30177,11 @@ def test_abandon_release_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.AbandonReleaseResponse() + return_value = cloud_deploy.DeployPolicy() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" } # get truthy value for each flattened field @@ -26050,25 +30194,25 @@ def test_abandon_release_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) + return_value = cloud_deploy.DeployPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.abandon_release(**mock_args) + client.get_deploy_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}:abandon" + "%s/v1/{name=projects/*/locations/*/deployPolicies/*}" % client.transport._host, args[1], ) -def test_abandon_release_rest_flattened_error(transport: str = "rest"): +def test_get_deploy_policy_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26077,13 +30221,13 @@ def test_abandon_release_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.abandon_release( - cloud_deploy.AbandonReleaseRequest(), + client.get_deploy_policy( + cloud_deploy.GetDeployPolicyRequest(), name="name_value", ) -def test_abandon_release_rest_error(): +def test_get_deploy_policy_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -28009,6 +32153,7 @@ def test_create_rollout_rest_required_fields( # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( + "override_deploy_policy", "request_id", "rollout_id", "starting_phase_id", @@ -28077,6 +32222,7 @@ def test_create_rollout_rest_unset_required_fields(): assert set(unset_fields) == ( set( ( + "overrideDeployPolicy", "requestId", "rolloutId", "startingPhaseId", @@ -33332,6 +37478,11 @@ def test_cloud_deploy_base_transport(): "get_release", "create_release", "abandon_release", + "create_deploy_policy", + "update_deploy_policy", + "delete_deploy_policy", + "list_deploy_policies", + "get_deploy_policy", "approve_rollout", "advance_rollout", "cancel_rollout", @@ -33698,6 +37849,21 @@ def test_cloud_deploy_client_transport_session_collision(transport_name): session1 = client1.transport.abandon_release._session session2 = client2.transport.abandon_release._session assert session1 != session2 + session1 = client1.transport.create_deploy_policy._session + session2 = client2.transport.create_deploy_policy._session + assert session1 != session2 + session1 = client1.transport.update_deploy_policy._session + session2 = client2.transport.update_deploy_policy._session + assert session1 != session2 + session1 = client1.transport.delete_deploy_policy._session + session2 = client2.transport.delete_deploy_policy._session + assert session1 != session2 + session1 = client1.transport.list_deploy_policies._session + session2 = client2.transport.list_deploy_policies._session + assert session1 != session2 + session1 = client1.transport.get_deploy_policy._session + session2 = client2.transport.get_deploy_policy._session + assert session1 != session2 session1 = client1.transport.approve_rollout._session session2 = client2.transport.approve_rollout._session assert session1 != session2 @@ -34105,10 +38271,38 @@ def test_parse_delivery_pipeline_path(): assert expected == actual -def test_job_path(): +def test_deploy_policy_path(): project = "winkle" location = "nautilus" - job = "scallop" + deploy_policy = "scallop" + expected = ( + 
"projects/{project}/locations/{location}/deployPolicies/{deploy_policy}".format( + project=project, + location=location, + deploy_policy=deploy_policy, + ) + ) + actual = CloudDeployClient.deploy_policy_path(project, location, deploy_policy) + assert expected == actual + + +def test_parse_deploy_policy_path(): + expected = { + "project": "abalone", + "location": "squid", + "deploy_policy": "clam", + } + path = CloudDeployClient.deploy_policy_path(**expected) + + # Check that the path construction is reversible. + actual = CloudDeployClient.parse_deploy_policy_path(path) + assert expected == actual + + +def test_job_path(): + project = "whelk" + location = "octopus" + job = "oyster" expected = "projects/{project}/locations/{location}/jobs/{job}".format( project=project, location=location, @@ -34120,9 +38314,9 @@ def test_job_path(): def test_parse_job_path(): expected = { - "project": "abalone", - "location": "squid", - "job": "clam", + "project": "nudibranch", + "location": "cuttlefish", + "job": "mussel", } path = CloudDeployClient.job_path(**expected) @@ -34132,12 +38326,12 @@ def test_parse_job_path(): def test_job_run_path(): - project = "whelk" - location = "octopus" - delivery_pipeline = "oyster" - release = "nudibranch" - rollout = "cuttlefish" - job_run = "mussel" + project = "winkle" + location = "nautilus" + delivery_pipeline = "scallop" + release = "abalone" + rollout = "squid" + job_run = "clam" expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}/rollouts/{rollout}/jobRuns/{job_run}".format( project=project, location=location, @@ -34154,12 +38348,12 @@ def test_job_run_path(): def test_parse_job_run_path(): expected = { - "project": "winkle", - "location": "nautilus", - "delivery_pipeline": "scallop", - "release": "abalone", - "rollout": "squid", - "job_run": "clam", + "project": "whelk", + "location": "octopus", + "delivery_pipeline": "oyster", + "release": "nudibranch", + "rollout": 
"cuttlefish", + "job_run": "mussel", } path = CloudDeployClient.job_run_path(**expected) @@ -34169,9 +38363,9 @@ def test_parse_job_run_path(): def test_membership_path(): - project = "whelk" - location = "octopus" - membership = "oyster" + project = "winkle" + location = "nautilus" + membership = "scallop" expected = ( "projects/{project}/locations/{location}/memberships/{membership}".format( project=project, @@ -34185,9 +38379,9 @@ def test_membership_path(): def test_parse_membership_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "membership": "mussel", + "project": "abalone", + "location": "squid", + "membership": "clam", } path = CloudDeployClient.membership_path(**expected) @@ -34197,10 +38391,10 @@ def test_parse_membership_path(): def test_release_path(): - project = "winkle" - location = "nautilus" - delivery_pipeline = "scallop" - release = "abalone" + project = "whelk" + location = "octopus" + delivery_pipeline = "oyster" + release = "nudibranch" expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}".format( project=project, location=location, @@ -34215,10 +38409,10 @@ def test_release_path(): def test_parse_release_path(): expected = { - "project": "squid", - "location": "clam", - "delivery_pipeline": "whelk", - "release": "octopus", + "project": "cuttlefish", + "location": "mussel", + "delivery_pipeline": "winkle", + "release": "nautilus", } path = CloudDeployClient.release_path(**expected) @@ -34228,10 +38422,10 @@ def test_parse_release_path(): def test_repository_path(): - project = "oyster" - location = "nudibranch" - connection = "cuttlefish" - repository = "mussel" + project = "scallop" + location = "abalone" + connection = "squid" + repository = "clam" expected = "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format( project=project, location=location, @@ -34246,10 +38440,10 @@ def test_repository_path(): def 
test_parse_repository_path(): expected = { - "project": "winkle", - "location": "nautilus", - "connection": "scallop", - "repository": "abalone", + "project": "whelk", + "location": "octopus", + "connection": "oyster", + "repository": "nudibranch", } path = CloudDeployClient.repository_path(**expected) @@ -34259,11 +38453,11 @@ def test_parse_repository_path(): def test_rollout_path(): - project = "squid" - location = "clam" - delivery_pipeline = "whelk" - release = "octopus" - rollout = "oyster" + project = "cuttlefish" + location = "mussel" + delivery_pipeline = "winkle" + release = "nautilus" + rollout = "scallop" expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}/rollouts/{rollout}".format( project=project, location=location, @@ -34279,11 +38473,11 @@ def test_rollout_path(): def test_parse_rollout_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "delivery_pipeline": "mussel", - "release": "winkle", - "rollout": "nautilus", + "project": "abalone", + "location": "squid", + "delivery_pipeline": "clam", + "release": "whelk", + "rollout": "octopus", } path = CloudDeployClient.rollout_path(**expected) @@ -34293,9 +38487,9 @@ def test_parse_rollout_path(): def test_service_path(): - project = "scallop" - location = "abalone" - service = "squid" + project = "oyster" + location = "nudibranch" + service = "cuttlefish" expected = "projects/{project}/locations/{location}/services/{service}".format( project=project, location=location, @@ -34307,9 +38501,9 @@ def test_service_path(): def test_parse_service_path(): expected = { - "project": "clam", - "location": "whelk", - "service": "octopus", + "project": "mussel", + "location": "winkle", + "service": "nautilus", } path = CloudDeployClient.service_path(**expected) @@ -34319,9 +38513,9 @@ def test_parse_service_path(): def test_target_path(): - project = "oyster" - location = "nudibranch" - target = "cuttlefish" + project = "scallop" + 
location = "abalone" + target = "squid" expected = "projects/{project}/locations/{location}/targets/{target}".format( project=project, location=location, @@ -34333,9 +38527,9 @@ def test_target_path(): def test_parse_target_path(): expected = { - "project": "mussel", - "location": "winkle", - "target": "nautilus", + "project": "clam", + "location": "whelk", + "target": "octopus", } path = CloudDeployClient.target_path(**expected) @@ -34345,9 +38539,9 @@ def test_parse_target_path(): def test_worker_pool_path(): - project = "scallop" - location = "abalone" - worker_pool = "squid" + project = "oyster" + location = "nudibranch" + worker_pool = "cuttlefish" expected = ( "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( project=project, @@ -34361,9 +38555,9 @@ def test_worker_pool_path(): def test_parse_worker_pool_path(): expected = { - "project": "clam", - "location": "whelk", - "worker_pool": "octopus", + "project": "mussel", + "location": "winkle", + "worker_pool": "nautilus", } path = CloudDeployClient.worker_pool_path(**expected) @@ -34373,7 +38567,7 @@ def test_parse_worker_pool_path(): def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -34383,7 +38577,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "abalone", } path = CloudDeployClient.common_billing_account_path(**expected) @@ -34393,7 +38587,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "cuttlefish" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -34403,7 +38597,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "clam", } path = CloudDeployClient.common_folder_path(**expected) @@ -34413,7 
+38607,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -34423,7 +38617,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "octopus", } path = CloudDeployClient.common_organization_path(**expected) @@ -34433,7 +38627,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "oyster" expected = "projects/{project}".format( project=project, ) @@ -34443,7 +38637,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "nudibranch", } path = CloudDeployClient.common_project_path(**expected) @@ -34453,8 +38647,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "cuttlefish" + location = "mussel" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -34465,8 +38659,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "winkle", + "location": "nautilus", } path = CloudDeployClient.common_location_path(**expected) From 272130a696a54b5e895c13cf8e1aae3ee4dc0889 Mon Sep 17 00:00:00 2001 From: yoshi-code-bot <70984784+yoshi-code-bot@users.noreply.github.com> Date: Fri, 4 Oct 2024 11:07:04 -0700 Subject: [PATCH 50/59] chore: Update the root changelog (#13091) Update the root changelog Co-authored-by: ohmayr --- CHANGELOG.md | 36 +++++++++++++++++++----------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1004d55731cc..3c4f1ec4af65 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,11 +4,11 @@ Changelogs ----- - 
[google-ads-admanager==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-admanager/CHANGELOG.md) - [google-ads-marketingplatform-admin==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-marketingplatform-admin/CHANGELOG.md) -- [google-ai-generativelanguage==0.6.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) +- [google-ai-generativelanguage==0.6.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) - [google-analytics-admin==0.23.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-admin/CHANGELOG.md) -- [google-analytics-data==0.18.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) +- [google-analytics-data==0.18.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) - [google-apps-card==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-card/CHANGELOG.md) -- [google-apps-chat==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-chat/CHANGELOG.md) +- [google-apps-chat==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-chat/CHANGELOG.md) - [google-apps-events-subscriptions==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-events-subscriptions/CHANGELOG.md) - [google-apps-meet==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-meet/CHANGELOG.md) - [google-apps-script-type==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-script-type/CHANGELOG.md) @@ -26,12 +26,12 @@ Changelogs - 
[google-cloud-appengine-logging==1.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-logging/CHANGELOG.md) - [google-cloud-apphub==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apphub/CHANGELOG.md) - [google-cloud-artifact-registry==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-artifact-registry/CHANGELOG.md) -- [google-cloud-asset==3.26.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset/CHANGELOG.md) +- [google-cloud-asset==3.26.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset/CHANGELOG.md) - [google-cloud-assured-workloads==1.12.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-assured-workloads/CHANGELOG.md) - [google-cloud-automl==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl/CHANGELOG.md) - [google-cloud-backupdr==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-backupdr/CHANGELOG.md) - [google-cloud-bare-metal-solution==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) -- [google-cloud-batch==0.17.27](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) +- [google-cloud-batch==0.17.28](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) - [google-cloud-beyondcorp-appconnections==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md) - [google-cloud-beyondcorp-appconnectors==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md) - 
[google-cloud-beyondcorp-appgateways==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md) @@ -49,10 +49,10 @@ Changelogs - [google-cloud-billing-budgets==1.14.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing-budgets/CHANGELOG.md) - [google-cloud-billing==1.13.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing/CHANGELOG.md) - [google-cloud-binary-authorization==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-binary-authorization/CHANGELOG.md) -- [google-cloud-build==3.24.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-build/CHANGELOG.md) +- [google-cloud-build==3.25.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-build/CHANGELOG.md) - [google-cloud-certificate-manager==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-certificate-manager/CHANGELOG.md) - [google-cloud-channel==1.18.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-channel/CHANGELOG.md) -- [google-cloud-cloudcontrolspartner==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md) +- [google-cloud-cloudcontrolspartner==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md) - [google-cloud-cloudquotas==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudquotas/CHANGELOG.md) - [google-cloud-commerce-consumer-procurement==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md) - [google-cloud-common==1.3.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-common/CHANGELOG.md) 
@@ -72,14 +72,14 @@ Changelogs - [google-cloud-datalabeling==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datalabeling/CHANGELOG.md) - [google-cloud-dataplex==2.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex/CHANGELOG.md) - [google-cloud-dataproc-metastore==1.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc-metastore/CHANGELOG.md) -- [google-cloud-dataproc==5.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) +- [google-cloud-dataproc==5.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) - [google-cloud-datastream==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastream/CHANGELOG.md) - [google-cloud-deploy==2.0.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md) - [google-cloud-developerconnect==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-developerconnect/CHANGELOG.md) - [google-cloud-dialogflow-cx==1.35.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/CHANGELOG.md) -- [google-cloud-dialogflow==2.31.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) +- [google-cloud-dialogflow==2.32.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) - [google-cloud-discoveryengine==0.12.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) -- [google-cloud-dlp==3.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) +- 
[google-cloud-dlp==3.23.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) - [google-cloud-dms==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms/CHANGELOG.md) - [google-cloud-documentai==2.32.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) - [google-cloud-domains==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-domains/CHANGELOG.md) @@ -91,18 +91,18 @@ Changelogs - [google-cloud-eventarc==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc/CHANGELOG.md) - [google-cloud-filestore==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-filestore/CHANGELOG.md) - [google-cloud-functions==1.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md) -- [google-cloud-gdchardwaremanagement==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md) +- [google-cloud-gdchardwaremanagement==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md) - [google-cloud-gke-backup==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-backup/CHANGELOG.md) - [google-cloud-gke-connect-gateway==0.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md) - [google-cloud-gke-hub==1.14.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-hub/CHANGELOG.md) -- [google-cloud-gke-multicloud==0.6.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md) +- 
[google-cloud-gke-multicloud==0.6.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md) - [google-cloud-gsuiteaddons==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gsuiteaddons/CHANGELOG.md) - [google-cloud-iam-logging==1.3.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam-logging/CHANGELOG.md) - [google-cloud-iam==2.15.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam/CHANGELOG.md) - [google-cloud-iap==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iap/CHANGELOG.md) - [google-cloud-ids==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-ids/CHANGELOG.md) - [google-cloud-kms-inventory==0.2.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms-inventory/CHANGELOG.md) -- [google-cloud-kms==2.24.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms/CHANGELOG.md) +- [google-cloud-kms==3.0.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms/CHANGELOG.md) - [google-cloud-language==2.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-language/CHANGELOG.md) - [google-cloud-life-sciences==0.9.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-life-sciences/CHANGELOG.md) - [google-cloud-managed-identities==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managed-identities/CHANGELOG.md) @@ -113,14 +113,15 @@ Changelogs - [google-cloud-monitoring-dashboards==2.15.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-dashboards/CHANGELOG.md) - 
[google-cloud-monitoring-metrics-scopes==1.6.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-metrics-scopes/CHANGELOG.md) - [google-cloud-monitoring==2.22.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring/CHANGELOG.md) -- [google-cloud-netapp==0.3.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md) +- [google-cloud-netapp==0.3.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md) - [google-cloud-network-connectivity==2.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-connectivity/CHANGELOG.md) - [google-cloud-network-management==1.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-management/CHANGELOG.md) - [google-cloud-network-security==0.9.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-security/CHANGELOG.md) - [google-cloud-network-services==0.5.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-services/CHANGELOG.md) - [google-cloud-notebooks==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-notebooks/CHANGELOG.md) - [google-cloud-optimization==1.8.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-optimization/CHANGELOG.md) -- [google-cloud-orchestration-airflow==1.13.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-orchestration-airflow/CHANGELOG.md) +- [google-cloud-oracledatabase==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-oracledatabase/CHANGELOG.md) +- [google-cloud-orchestration-airflow==1.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-orchestration-airflow/CHANGELOG.md) - 
[google-cloud-os-config==1.17.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-config/CHANGELOG.md) - [google-cloud-os-login==2.14.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-login/CHANGELOG.md) - [google-cloud-parallelstore==0.2.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-parallelstore/CHANGELOG.md) @@ -181,11 +182,12 @@ Changelogs - [google-cloud-workstations==0.5.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workstations/CHANGELOG.md) - [google-geo-type==0.3.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-geo-type/CHANGELOG.md) - [google-maps-addressvalidation==0.3.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-addressvalidation/CHANGELOG.md) +- [google-maps-areainsights==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-areainsights/CHANGELOG.md) - [google-maps-fleetengine-delivery==0.2.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine-delivery/CHANGELOG.md) - [google-maps-fleetengine==0.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine/CHANGELOG.md) - [google-maps-mapsplatformdatasets==0.4.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-mapsplatformdatasets/CHANGELOG.md) -- [google-maps-places==0.1.17](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md) -- [google-maps-routeoptimization==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routeoptimization/CHANGELOG.md) +- [google-maps-places==0.1.18](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md) +- 
[google-maps-routeoptimization==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routeoptimization/CHANGELOG.md) - [google-maps-routing==0.6.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routing/CHANGELOG.md) - [google-maps-solar==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-solar/CHANGELOG.md) - [google-shopping-css==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-css/CHANGELOG.md) From 14092407b9faffccb6d8db45751a08c4e589bd51 Mon Sep 17 00:00:00 2001 From: yoshi-code-bot <70984784+yoshi-code-bot@users.noreply.github.com> Date: Fri, 4 Oct 2024 11:12:25 -0700 Subject: [PATCH 51/59] chore: Update release-please config files (#13127) Update release-please config files Co-authored-by: ohmayr --- release-please-config.json | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/release-please-config.json b/release-please-config.json index 7866aa34f750..71fbe6873b37 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -1361,11 +1361,6 @@ "path": "samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json", "type": "json" }, - { - "jsonpath": "$.clientLibrary.version", - "path": "samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json", - "type": "json" - }, { "jsonpath": "$.clientLibrary.version", "path": "samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json", @@ -2106,7 +2101,13 @@ "component": "google-cloud-parallelstore", "extra-files": [ "google/cloud/parallelstore/gapic_version.py", + "google/cloud/parallelstore_v1/gapic_version.py", "google/cloud/parallelstore_v1beta/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1.json", + "type": "json" + }, { "jsonpath": "$.clientLibrary.version", "path": 
"samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1beta.json", From 852d797f21d4809c32d98b384c60bf9852b14216 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 11:00:54 -0400 Subject: [PATCH 52/59] feat: [google-cloud-commerce-consumer-procurement] add Order modification RPCs and License Management Service (#13128) BEGIN_COMMIT_OVERRIDE feat: add Order modification RPCs and License Management Service docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_PENDING_APPROVAL` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_APPROVED` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_COMPLETED` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_REJECTED` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_ABANDONED` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_ACTIVATING` in enum `LineItemChangeState` is changed docs: A comment for field `request_id` in message `.google.cloud.commerce.consumer.procurement.v1.PlaceOrderRequest` is changed docs: A comment for field `filter` in message `.google.cloud.commerce.consumer.procurement.v1.ListOrdersRequest` is changed END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_PENDING_APPROVAL` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_APPROVED` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_COMPLETED` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_REJECTED` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_ABANDONED` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_ACTIVATING` in enum `LineItemChangeState` is changed docs: A comment for field `request_id` in message `.google.cloud.commerce.consumer.procurement.v1.PlaceOrderRequest` is changed docs: A comment for field `filter` in message `.google.cloud.commerce.consumer.procurement.v1.ListOrdersRequest` is changed PiperOrigin-RevId: 682457622 Source-Link: https://github.com/googleapis/googleapis/commit/1f8352cf46df74d7db6fd544181655c590689b8c Source-Link: https://github.com/googleapis/googleapis-gen/commit/172720068f2259b365a56a22a0c7c2a6d446c732 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNvbW1lcmNlLWNvbnN1bWVyLXByb2N1cmVtZW50Ly5Pd2xCb3QueWFtbCIsImgiOiIxNzI3MjAwNjhmMjI1OWIzNjVhNTZhMjJhMGM3YzJhNmQ0NDZjNzMyIn0= --------- Co-authored-by: Owl Bot --- .../license_management_service.rst | 10 + .../services_.rst | 1 + .../commerce_consumer_procurement/__init__.py | 42 + .../__init__.py | 40 + .../gapic_metadata.json | 124 + .../async_client.py | 224 + .../consumer_procurement_service/client.py | 220 + .../transports/base.py | 28 + .../transports/grpc.py | 56 + .../transports/grpc_asyncio.py | 70 + .../transports/rest.py | 266 + .../license_management_service/__init__.py | 22 + .../async_client.py | 925 +++ .../license_management_service/client.py | 1339 ++++ .../license_management_service/pagers.py | 207 + .../transports/__init__.py | 41 + .../transports/base.py | 246 + .../transports/grpc.py | 
411 ++ .../transports/grpc_asyncio.py | 441 ++ .../transports/rest.py | 958 +++ .../types/__init__.py | 34 + .../types/license_management_service.py | 331 + .../types/order.py | 18 +- .../types/procurement_service.py | 186 +- ..._procurement_service_cancel_order_async.py | 56 + ...r_procurement_service_cancel_order_sync.py | 56 + ..._procurement_service_modify_order_async.py | 56 + ...r_procurement_service_modify_order_sync.py | 56 + ...license_management_service_assign_async.py | 53 + ..._license_management_service_assign_sync.py | 53 + ..._service_enumerate_licensed_users_async.py | 53 + ...t_service_enumerate_licensed_users_sync.py | 53 + ...nagement_service_get_license_pool_async.py | 52 + ...anagement_service_get_license_pool_sync.py | 52 + ...cense_management_service_unassign_async.py | 53 + ...icense_management_service_unassign_sync.py | 53 + ...ement_service_update_license_pool_async.py | 51 + ...gement_service_update_license_pool_sync.py | 51 + ...loud.commerce.consumer.procurement.v1.json | 1177 +++- ...mmerce_consumer_procurement_v1_keywords.py | 7 + .../test_consumer_procurement_service.py | 1776 ++++- .../test_license_management_service.py | 6057 +++++++++++++++++ 42 files changed, 15626 insertions(+), 379 deletions(-) create mode 100644 packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/license_management_service.rst create mode 100644 packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/__init__.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/async_client.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py create mode 100644 
packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/pagers.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/__init__.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/base.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/grpc.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/rest.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/license_management_service.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_async.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_sync.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_async.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_sync.py create mode 100644 
packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_async.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_sync.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_async.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_sync.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_async.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_sync.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_async.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_sync.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_async.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_sync.py create mode 100644 
packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_license_management_service.py diff --git a/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/license_management_service.rst b/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/license_management_service.rst new file mode 100644 index 000000000000..d08a71e7aec0 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/license_management_service.rst @@ -0,0 +1,10 @@ +LicenseManagementService +------------------------------------------ + +.. automodule:: google.cloud.commerce_consumer_procurement_v1.services.license_management_service + :members: + :inherited-members: + +.. automodule:: google.cloud.commerce_consumer_procurement_v1.services.license_management_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/services_.rst b/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/services_.rst index d5e8b5f12ecb..8d66166cebbe 100644 --- a/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/services_.rst +++ b/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/services_.rst @@ -4,3 +4,4 @@ Services for Google Cloud Commerce Consumer Procurement v1 API :maxdepth: 2 consumer_procurement_service + license_management_service diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/__init__.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/__init__.py index f271433c727b..bba3576813f8 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/__init__.py +++ 
b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/__init__.py @@ -24,6 +24,25 @@ from google.cloud.commerce_consumer_procurement_v1.services.consumer_procurement_service.client import ( ConsumerProcurementServiceClient, ) +from google.cloud.commerce_consumer_procurement_v1.services.license_management_service.async_client import ( + LicenseManagementServiceAsyncClient, +) +from google.cloud.commerce_consumer_procurement_v1.services.license_management_service.client import ( + LicenseManagementServiceClient, +) +from google.cloud.commerce_consumer_procurement_v1.types.license_management_service import ( + AssignmentProtocol, + AssignRequest, + AssignResponse, + EnumerateLicensedUsersRequest, + EnumerateLicensedUsersResponse, + GetLicensePoolRequest, + LicensedUser, + LicensePool, + UnassignRequest, + UnassignResponse, + UpdateLicensePoolRequest, +) from google.cloud.commerce_consumer_procurement_v1.types.order import ( LineItem, LineItemChange, @@ -36,9 +55,14 @@ Subscription, ) from google.cloud.commerce_consumer_procurement_v1.types.procurement_service import ( + AutoRenewalBehavior, + CancelOrderMetadata, + CancelOrderRequest, GetOrderRequest, ListOrdersRequest, ListOrdersResponse, + ModifyOrderMetadata, + ModifyOrderRequest, PlaceOrderMetadata, PlaceOrderRequest, ) @@ -46,6 +70,19 @@ __all__ = ( "ConsumerProcurementServiceClient", "ConsumerProcurementServiceAsyncClient", + "LicenseManagementServiceClient", + "LicenseManagementServiceAsyncClient", + "AssignmentProtocol", + "AssignRequest", + "AssignResponse", + "EnumerateLicensedUsersRequest", + "EnumerateLicensedUsersResponse", + "GetLicensePoolRequest", + "LicensedUser", + "LicensePool", + "UnassignRequest", + "UnassignResponse", + "UpdateLicensePoolRequest", "LineItem", "LineItemChange", "LineItemInfo", @@ -55,9 +92,14 @@ "LineItemChangeState", "LineItemChangeStateReasonType", "LineItemChangeType", + "CancelOrderMetadata", + "CancelOrderRequest", "GetOrderRequest", 
"ListOrdersRequest", "ListOrdersResponse", + "ModifyOrderMetadata", + "ModifyOrderRequest", "PlaceOrderMetadata", "PlaceOrderRequest", + "AutoRenewalBehavior", ) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/__init__.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/__init__.py index d1a4fa34b7d8..2a6c2e07c4ab 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/__init__.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/__init__.py @@ -24,6 +24,23 @@ ConsumerProcurementServiceAsyncClient, ConsumerProcurementServiceClient, ) +from .services.license_management_service import ( + LicenseManagementServiceAsyncClient, + LicenseManagementServiceClient, +) +from .types.license_management_service import ( + AssignmentProtocol, + AssignRequest, + AssignResponse, + EnumerateLicensedUsersRequest, + EnumerateLicensedUsersResponse, + GetLicensePoolRequest, + LicensedUser, + LicensePool, + UnassignRequest, + UnassignResponse, + UpdateLicensePoolRequest, +) from .types.order import ( LineItem, LineItemChange, @@ -36,17 +53,35 @@ Subscription, ) from .types.procurement_service import ( + AutoRenewalBehavior, + CancelOrderMetadata, + CancelOrderRequest, GetOrderRequest, ListOrdersRequest, ListOrdersResponse, + ModifyOrderMetadata, + ModifyOrderRequest, PlaceOrderMetadata, PlaceOrderRequest, ) __all__ = ( "ConsumerProcurementServiceAsyncClient", + "LicenseManagementServiceAsyncClient", + "AssignRequest", + "AssignResponse", + "AssignmentProtocol", + "AutoRenewalBehavior", + "CancelOrderMetadata", + "CancelOrderRequest", "ConsumerProcurementServiceClient", + "EnumerateLicensedUsersRequest", + "EnumerateLicensedUsersResponse", + "GetLicensePoolRequest", "GetOrderRequest", + "LicenseManagementServiceClient", + "LicensePool", + "LicensedUser", "LineItem", 
"LineItemChange", "LineItemChangeState", @@ -55,9 +90,14 @@ "LineItemInfo", "ListOrdersRequest", "ListOrdersResponse", + "ModifyOrderMetadata", + "ModifyOrderRequest", "Order", "Parameter", "PlaceOrderMetadata", "PlaceOrderRequest", "Subscription", + "UnassignRequest", + "UnassignResponse", + "UpdateLicensePoolRequest", ) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_metadata.json b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_metadata.json index 638c161ad386..e11a84a7c315 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_metadata.json +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_metadata.json @@ -10,6 +10,11 @@ "grpc": { "libraryClient": "ConsumerProcurementServiceClient", "rpcs": { + "CancelOrder": { + "methods": [ + "cancel_order" + ] + }, "GetOrder": { "methods": [ "get_order" @@ -20,6 +25,11 @@ "list_orders" ] }, + "ModifyOrder": { + "methods": [ + "modify_order" + ] + }, "PlaceOrder": { "methods": [ "place_order" @@ -30,6 +40,11 @@ "grpc-async": { "libraryClient": "ConsumerProcurementServiceAsyncClient", "rpcs": { + "CancelOrder": { + "methods": [ + "cancel_order" + ] + }, "GetOrder": { "methods": [ "get_order" @@ -40,6 +55,11 @@ "list_orders" ] }, + "ModifyOrder": { + "methods": [ + "modify_order" + ] + }, "PlaceOrder": { "methods": [ "place_order" @@ -50,6 +70,11 @@ "rest": { "libraryClient": "ConsumerProcurementServiceClient", "rpcs": { + "CancelOrder": { + "methods": [ + "cancel_order" + ] + }, "GetOrder": { "methods": [ "get_order" @@ -60,6 +85,11 @@ "list_orders" ] }, + "ModifyOrder": { + "methods": [ + "modify_order" + ] + }, "PlaceOrder": { "methods": [ "place_order" @@ -68,6 +98,100 @@ } } } + }, + "LicenseManagementService": { + "clients": { + "grpc": { + "libraryClient": 
"LicenseManagementServiceClient", + "rpcs": { + "Assign": { + "methods": [ + "assign" + ] + }, + "EnumerateLicensedUsers": { + "methods": [ + "enumerate_licensed_users" + ] + }, + "GetLicensePool": { + "methods": [ + "get_license_pool" + ] + }, + "Unassign": { + "methods": [ + "unassign" + ] + }, + "UpdateLicensePool": { + "methods": [ + "update_license_pool" + ] + } + } + }, + "grpc-async": { + "libraryClient": "LicenseManagementServiceAsyncClient", + "rpcs": { + "Assign": { + "methods": [ + "assign" + ] + }, + "EnumerateLicensedUsers": { + "methods": [ + "enumerate_licensed_users" + ] + }, + "GetLicensePool": { + "methods": [ + "get_license_pool" + ] + }, + "Unassign": { + "methods": [ + "unassign" + ] + }, + "UpdateLicensePool": { + "methods": [ + "update_license_pool" + ] + } + } + }, + "rest": { + "libraryClient": "LicenseManagementServiceClient", + "rpcs": { + "Assign": { + "methods": [ + "assign" + ] + }, + "EnumerateLicensedUsers": { + "methods": [ + "enumerate_licensed_users" + ] + }, + "GetLicensePool": { + "methods": [ + "get_license_pool" + ] + }, + "Unassign": { + "methods": [ + "unassign" + ] + }, + "UpdateLicensePool": { + "methods": [ + "update_license_pool" + ] + } + } + } + } } } } diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py index ba83a537babe..4b4132787111 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py @@ -658,6 +658,230 @@ async def sample_list_orders(): # Done; return the response. 
return response + async def modify_order( + self, + request: Optional[Union[procurement_service.ModifyOrderRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Modifies an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order] + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_modify_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.ModifyOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.modify_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.ModifyOrderRequest, dict]]): + The request object. Request message for + [ConsumerProcurementService.ModifyOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.commerce_consumer_procurement_v1.types.Order` Represents a purchase made by a customer on Cloud Marketplace. + Creating an order makes sure that both the Google + backend systems as well as external service + provider's systems (if needed) allow use of purchased + products and ensures the appropriate billing events + occur. + + An Order can be made against one Product with + multiple add-ons (optional) or one Quote which might + reference multiple products. + + Customers typically choose a price plan for each + Product purchased when they create an order and can + change their plan later, if the product allows. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, procurement_service.ModifyOrderRequest): + request = procurement_service.ModifyOrderRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.modify_order + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + order.Order, + metadata_type=procurement_service.ModifyOrderMetadata, + ) + + # Done; return the response. 
+ return response + + async def cancel_order( + self, + request: Optional[Union[procurement_service.CancelOrderRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Cancels an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order]. + Every product procured in the Order will be cancelled. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_cancel_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.CancelOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.CancelOrderRequest, dict]]): + The request object. Request message for + [ConsumerProcurementService.CancelOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.commerce_consumer_procurement_v1.types.Order` Represents a purchase made by a customer on Cloud Marketplace. + Creating an order makes sure that both the Google + backend systems as well as external service + provider's systems (if needed) allow use of purchased + products and ensures the appropriate billing events + occur. + + An Order can be made against one Product with + multiple add-ons (optional) or one Quote which might + reference multiple products. + + Customers typically choose a price plan for each + Product purchased when they create an order and can + change their plan later, if the product allows. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, procurement_service.CancelOrderRequest): + request = procurement_service.CancelOrderRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.cancel_order + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + order.Order, + metadata_type=procurement_service.CancelOrderMetadata, + ) + + # Done; return the response. 
+ return response + async def get_operation( self, request: Optional[operations_pb2.GetOperationRequest] = None, diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py index be7ec242cec1..525ad9877370 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py @@ -1082,6 +1082,226 @@ def sample_list_orders(): # Done; return the response. return response + def modify_order( + self, + request: Optional[Union[procurement_service.ModifyOrderRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Modifies an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order] + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + def sample_modify_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.ModifyOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.modify_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.commerce_consumer_procurement_v1.types.ModifyOrderRequest, dict]): + The request object. Request message for + [ConsumerProcurementService.ModifyOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.commerce_consumer_procurement_v1.types.Order` Represents a purchase made by a customer on Cloud Marketplace. + Creating an order makes sure that both the Google + backend systems as well as external service + provider's systems (if needed) allow use of purchased + products and ensures the appropriate billing events + occur. + + An Order can be made against one Product with + multiple add-ons (optional) or one Quote which might + reference multiple products. 
+ + Customers typically choose a price plan for each + Product purchased when they create an order and can + change their plan later, if the product allows. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, procurement_service.ModifyOrderRequest): + request = procurement_service.ModifyOrderRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.modify_order] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + order.Order, + metadata_type=procurement_service.ModifyOrderMetadata, + ) + + # Done; return the response. + return response + + def cancel_order( + self, + request: Optional[Union[procurement_service.CancelOrderRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Cancels an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order]. + Every product procured in the Order will be cancelled. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + def sample_cancel_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.CancelOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.commerce_consumer_procurement_v1.types.CancelOrderRequest, dict]): + The request object. Request message for + [ConsumerProcurementService.CancelOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.commerce_consumer_procurement_v1.types.Order` Represents a purchase made by a customer on Cloud Marketplace. + Creating an order makes sure that both the Google + backend systems as well as external service + provider's systems (if needed) allow use of purchased + products and ensures the appropriate billing events + occur. + + An Order can be made against one Product with + multiple add-ons (optional) or one Quote which might + reference multiple products. 
+ + Customers typically choose a price plan for each + Product purchased when they create an order and can + change their plan later, if the product allows. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, procurement_service.CancelOrderRequest): + request = procurement_service.CancelOrderRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_order] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + order.Order, + metadata_type=procurement_service.CancelOrderMetadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "ConsumerProcurementServiceClient": return self diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/base.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/base.py index 405ae9789b33..b7df5231e9d5 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/base.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/base.py @@ -167,6 +167,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.modify_order: gapic_v1.method.wrap_method( + self.modify_order, + default_timeout=None, + client_info=client_info, + ), + self.cancel_order: gapic_v1.method.wrap_method( + self.cancel_order, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -213,6 +223,24 @@ def list_orders( ]: raise NotImplementedError() + @property + def modify_order( + self, + ) -> Callable[ + [procurement_service.ModifyOrderRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_order( + self, + ) -> Callable[ + [procurement_service.CancelOrderRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def get_operation( self, diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc.py 
b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc.py index 25e976eeed36..307d720364fa 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc.py @@ -353,6 +353,62 @@ def list_orders( ) return self._stubs["list_orders"] + @property + def modify_order( + self, + ) -> Callable[[procurement_service.ModifyOrderRequest], operations_pb2.Operation]: + r"""Return a callable for the modify order method over gRPC. + + Modifies an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order] + resource. + + Returns: + Callable[[~.ModifyOrderRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "modify_order" not in self._stubs: + self._stubs["modify_order"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService/ModifyOrder", + request_serializer=procurement_service.ModifyOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["modify_order"] + + @property + def cancel_order( + self, + ) -> Callable[[procurement_service.CancelOrderRequest], operations_pb2.Operation]: + r"""Return a callable for the cancel order method over gRPC. + + Cancels an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order]. + Every product procured in the Order will be cancelled. 
+ + Returns: + Callable[[~.CancelOrderRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_order" not in self._stubs: + self._stubs["cancel_order"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService/CancelOrder", + request_serializer=procurement_service.CancelOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["cancel_order"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc_asyncio.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc_asyncio.py index fb34a2b76187..0cdba90bcd6f 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc_asyncio.py @@ -366,6 +366,66 @@ def list_orders( ) return self._stubs["list_orders"] + @property + def modify_order( + self, + ) -> Callable[ + [procurement_service.ModifyOrderRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the modify order method over gRPC. + + Modifies an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order] + resource. 
+ + Returns: + Callable[[~.ModifyOrderRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "modify_order" not in self._stubs: + self._stubs["modify_order"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService/ModifyOrder", + request_serializer=procurement_service.ModifyOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["modify_order"] + + @property + def cancel_order( + self, + ) -> Callable[ + [procurement_service.CancelOrderRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the cancel order method over gRPC. + + Cancels an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order]. + Every product procured in the Order will be cancelled. + + Returns: + Callable[[~.CancelOrderRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_order" not in self._stubs: + self._stubs["cancel_order"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService/CancelOrder", + request_serializer=procurement_service.CancelOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["cancel_order"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -402,6 +462,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.modify_order: gapic_v1.method_async.wrap_method( + self.modify_order, + default_timeout=None, + client_info=client_info, + ), + self.cancel_order: gapic_v1.method_async.wrap_method( + self.cancel_order, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py index 353e9db69b53..16459934f854 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py @@ -74,6 +74,14 @@ class ConsumerProcurementServiceRestInterceptor: .. 
code-block:: python class MyCustomConsumerProcurementServiceInterceptor(ConsumerProcurementServiceRestInterceptor): + def pre_cancel_order(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_cancel_order(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_order(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -90,6 +98,14 @@ def post_list_orders(self, response): logging.log(f"Received response: {response}") return response + def pre_modify_order(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_modify_order(self, response): + logging.log(f"Received response: {response}") + return response + def pre_place_order(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -104,6 +120,29 @@ def post_place_order(self, response): """ + def pre_cancel_order( + self, + request: procurement_service.CancelOrderRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[procurement_service.CancelOrderRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_order + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConsumerProcurementService server. + """ + return request, metadata + + def post_cancel_order( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for cancel_order + + Override in a subclass to manipulate the response + after it is returned by the ConsumerProcurementService server but before + it is returned to user code. 
+ """ + return response + def pre_get_order( self, request: procurement_service.GetOrderRequest, @@ -148,6 +187,29 @@ def post_list_orders( """ return response + def pre_modify_order( + self, + request: procurement_service.ModifyOrderRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[procurement_service.ModifyOrderRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for modify_order + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConsumerProcurementService server. + """ + return request, metadata + + def post_modify_order( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for modify_order + + Override in a subclass to manipulate the response + after it is returned by the ConsumerProcurementService server but before + it is returned to user code. + """ + return response + def pre_place_order( self, request: procurement_service.PlaceOrderRequest, @@ -333,6 +395,100 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. return self._operations_client + class _CancelOrder(ConsumerProcurementServiceRestStub): + def __hash__(self): + return hash("CancelOrder") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: procurement_service.CancelOrderRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the cancel order method over HTTP. + + Args: + request (~.procurement_service.CancelOrderRequest): + The request object. 
Request message for + [ConsumerProcurementService.CancelOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=billingAccounts/*/orders/*}:cancel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_cancel_order(request, metadata) + pb_request = procurement_service.CancelOrderRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_cancel_order(resp) + return resp + class _GetOrder(ConsumerProcurementServiceRestStub): def __hash__(self): return hash("GetOrder") @@ -525,6 +681,100 @@ def __call__( resp = self._interceptor.post_list_orders(resp) return resp + class _ModifyOrder(ConsumerProcurementServiceRestStub): + def __hash__(self): + return hash("ModifyOrder") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: procurement_service.ModifyOrderRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the modify order method over HTTP. + + Args: + request (~.procurement_service.ModifyOrderRequest): + The request object. Request message for + [ConsumerProcurementService.ModifyOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=billingAccounts/*/orders/*}:modify", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_modify_order(request, metadata) + pb_request = procurement_service.ModifyOrderRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_modify_order(resp) + return resp + class _PlaceOrder(ConsumerProcurementServiceRestStub): def __hash__(self): return hash("PlaceOrder") @@ -619,6 +869,14 @@ def __call__( resp = self._interceptor.post_place_order(resp) return resp + @property + def cancel_order( + self, + ) -> Callable[[procurement_service.CancelOrderRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CancelOrder(self._session, self._host, self._interceptor) # type: ignore + @property def get_order(self) -> Callable[[procurement_service.GetOrderRequest], order.Order]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. @@ -635,6 +893,14 @@ def list_orders( # In C++ this would require a dynamic_cast return self._ListOrders(self._session, self._host, self._interceptor) # type: ignore + @property + def modify_order( + self, + ) -> Callable[[procurement_service.ModifyOrderRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ModifyOrder(self._session, self._host, self._interceptor) # type: ignore + @property def place_order( self, diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/__init__.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/__init__.py new file mode 100644 index 000000000000..d1b19f8f83bc --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import LicenseManagementServiceAsyncClient +from .client import LicenseManagementServiceClient + +__all__ = ( + "LicenseManagementServiceClient", + "LicenseManagementServiceAsyncClient", +) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/async_client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/async_client.py new file mode 100644 index 000000000000..015a76c88aef --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/async_client.py @@ -0,0 +1,925 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.commerce_consumer_procurement_v1 import ( + gapic_version as package_version, +) + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.commerce_consumer_procurement_v1.services.license_management_service import ( + pagers, +) +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) + +from .client import LicenseManagementServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, LicenseManagementServiceTransport +from .transports.grpc_asyncio import LicenseManagementServiceGrpcAsyncIOTransport + + +class LicenseManagementServiceAsyncClient: + """Service for managing licenses.""" + + _client: LicenseManagementServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = LicenseManagementServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = LicenseManagementServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + LicenseManagementServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = LicenseManagementServiceClient._DEFAULT_UNIVERSE + + license_pool_path = staticmethod(LicenseManagementServiceClient.license_pool_path) + parse_license_pool_path = staticmethod( + LicenseManagementServiceClient.parse_license_pool_path + ) + common_billing_account_path = staticmethod( + LicenseManagementServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + LicenseManagementServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(LicenseManagementServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + LicenseManagementServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + LicenseManagementServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + LicenseManagementServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + LicenseManagementServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + LicenseManagementServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + LicenseManagementServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + LicenseManagementServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LicenseManagementServiceAsyncClient: The constructed client. 
+ """ + return LicenseManagementServiceClient.from_service_account_info.__func__(LicenseManagementServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LicenseManagementServiceAsyncClient: The constructed client. + """ + return LicenseManagementServiceClient.from_service_account_file.__func__(LicenseManagementServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return LicenseManagementServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> LicenseManagementServiceTransport: + """Returns the transport used by the client instance. + + Returns: + LicenseManagementServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = LicenseManagementServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + LicenseManagementServiceTransport, + Callable[..., LicenseManagementServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the license management service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,LicenseManagementServiceTransport,Callable[..., LicenseManagementServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LicenseManagementServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = LicenseManagementServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_license_pool( + self, + request: Optional[ + Union[license_management_service.GetLicensePoolRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.LicensePool: + r"""Gets the license pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_get_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.GetLicensePoolRequest( + name="name_value", + ) + + # Make the request + response = await client.get_license_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.GetLicensePoolRequest, dict]]): + The request object. Request message for getting a license + pool. + name (:class:`str`): + Required. The name of the license pool to get. 
Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.LicensePool: + A license pool represents a pool of + licenses that can be assigned to users. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.GetLicensePoolRequest): + request = license_management_service.GetLicensePoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_license_pool + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_license_pool( + self, + request: Optional[ + Union[license_management_service.UpdateLicensePoolRequest, dict] + ] = None, + *, + license_pool: Optional[license_management_service.LicensePool] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.LicensePool: + r"""Updates the license pool if one exists for this + Order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_update_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UpdateLicensePoolRequest( + ) + + # Make the request + response = await client.update_license_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.UpdateLicensePoolRequest, dict]]): + The request object. Request message for updating a + license pool. + license_pool (:class:`google.cloud.commerce_consumer_procurement_v1.types.LicensePool`): + Required. The license pool to update. + + The license pool's name field is used to identify the + license pool to update. 
Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool``. + + This corresponds to the ``license_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.LicensePool: + A license pool represents a pool of + licenses that can be assigned to users. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([license_pool, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.UpdateLicensePoolRequest): + request = license_management_service.UpdateLicensePoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if license_pool is not None: + request.license_pool = license_pool + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_license_pool + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("license_pool.name", request.license_pool.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def assign( + self, + request: Optional[Union[license_management_service.AssignRequest, dict]] = None, + *, + parent: Optional[str] = None, + usernames: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.AssignResponse: + r"""Assigns a license to a user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_assign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.AssignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = await client.assign(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.AssignRequest, dict]]): + The request object. Request message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + parent (:class:`str`): + Required. License pool name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + usernames (:class:`MutableSequence[str]`): + Required. Username. Format: ``name@domain.com``. + This corresponds to the ``usernames`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.AssignResponse: + Response message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, usernames]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.AssignRequest): + request = license_management_service.AssignRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if usernames: + request.usernames.extend(usernames) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.assign] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def unassign( + self, + request: Optional[ + Union[license_management_service.UnassignRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + usernames: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.UnassignResponse: + r"""Unassigns a license from a user. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_unassign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UnassignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = await client.unassign(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.UnassignRequest, dict]]): + The request object. Request message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + parent (:class:`str`): + Required. License pool name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + usernames (:class:`MutableSequence[str]`): + Required. Username. Format: ``name@domain.com``. + This corresponds to the ``usernames`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.commerce_consumer_procurement_v1.types.UnassignResponse: + Response message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, usernames]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.UnassignRequest): + request = license_management_service.UnassignRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if usernames: + request.usernames.extend(usernames) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.unassign] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def enumerate_licensed_users( + self, + request: Optional[ + Union[license_management_service.EnumerateLicensedUsersRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.EnumerateLicensedUsersAsyncPager: + r"""Enumerates all users assigned a license. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_enumerate_licensed_users(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.EnumerateLicensedUsersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.enumerate_licensed_users(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersRequest, dict]]): + The request object. Request message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + parent (:class:`str`): + Required. License pool name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.services.license_management_service.pagers.EnumerateLicensedUsersAsyncPager: + Response message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, license_management_service.EnumerateLicensedUsersRequest + ): + request = license_management_service.EnumerateLicensedUsersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.enumerate_licensed_users + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.EnumerateLicensedUsersAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "LicenseManagementServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("LicenseManagementServiceAsyncClient",) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py new file mode 100644 index 000000000000..5c8aee6b83e5 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py @@ -0,0 +1,1339 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.commerce_consumer_procurement_v1 import ( + gapic_version as package_version, +) + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.commerce_consumer_procurement_v1.services.license_management_service import ( + pagers, +) +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) + +from .transports.base import DEFAULT_CLIENT_INFO, LicenseManagementServiceTransport +from .transports.grpc import LicenseManagementServiceGrpcTransport +from .transports.grpc_asyncio import LicenseManagementServiceGrpcAsyncIOTransport +from .transports.rest import LicenseManagementServiceRestTransport + + +class LicenseManagementServiceClientMeta(type): + """Metaclass for the LicenseManagementService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[LicenseManagementServiceTransport]] + _transport_registry["grpc"] = LicenseManagementServiceGrpcTransport + _transport_registry["grpc_asyncio"] = LicenseManagementServiceGrpcAsyncIOTransport + _transport_registry["rest"] = LicenseManagementServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[LicenseManagementServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class LicenseManagementServiceClient(metaclass=LicenseManagementServiceClientMeta): + """Service for managing licenses.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. 
Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "cloudcommerceconsumerprocurement.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "cloudcommerceconsumerprocurement.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LicenseManagementServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LicenseManagementServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> LicenseManagementServiceTransport: + """Returns the transport used by the client instance. + + Returns: + LicenseManagementServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def license_pool_path( + billing_account: str, + order: str, + ) -> str: + """Returns a fully-qualified license_pool string.""" + return "billingAccounts/{billing_account}/orders/{order}/licensePool".format( + billing_account=billing_account, + order=order, + ) + + @staticmethod + def parse_license_pool_path(path: str) -> Dict[str, str]: + """Parses a license_pool path into its component segments.""" + m = re.match( + r"^billingAccounts/(?P.+?)/orders/(?P.+?)/licensePool$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a 
fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. 
+ + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. 
+ universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = LicenseManagementServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = LicenseManagementServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + LicenseManagementServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = LicenseManagementServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = LicenseManagementServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. 
+ """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or LicenseManagementServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + LicenseManagementServiceTransport, + Callable[..., LicenseManagementServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the license management service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,LicenseManagementServiceTransport,Callable[..., LicenseManagementServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LicenseManagementServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = LicenseManagementServiceClient._read_environment_variables() + self._client_cert_source = ( + LicenseManagementServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = LicenseManagementServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, LicenseManagementServiceTransport) + if transport_provided: + # transport is a LicenseManagementServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(LicenseManagementServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or LicenseManagementServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[LicenseManagementServiceTransport], + Callable[..., LicenseManagementServiceTransport], + ] = ( + LicenseManagementServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., LicenseManagementServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_license_pool( + self, + request: Optional[ + Union[license_management_service.GetLicensePoolRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.LicensePool: + r"""Gets the license pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + def sample_get_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.GetLicensePoolRequest( + name="name_value", + ) + + # Make the request + response = client.get_license_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.commerce_consumer_procurement_v1.types.GetLicensePoolRequest, dict]): + The request object. Request message for getting a license + pool. + name (str): + Required. The name of the license pool to get. Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.LicensePool: + A license pool represents a pool of + licenses that can be assigned to users. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
        )

        # - Use the request object if provided (there's no risk of modifying the input as
        # there are no flattened fields), or create one.
        if not isinstance(request, license_management_service.GetLicensePoolRequest):
            request = license_management_service.GetLicensePoolRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get_license_pool]

        # Certain fields should be provided within the metadata header;
        # add these here.
        # NOTE: `metadata` is deliberately rebound — the caller-supplied sequence is
        # extended with the x-goog-request-params routing header derived from `name`.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def update_license_pool(
        self,
        request: Optional[
            Union[license_management_service.UpdateLicensePoolRequest, dict]
        ] = None,
        *,
        license_pool: Optional[license_management_service.LicensePool] = None,
        update_mask: Optional[field_mask_pb2.FieldMask] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> license_management_service.LicensePool:
        r"""Updates the license pool if one exists for this
        Order.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import commerce_consumer_procurement_v1

            def sample_update_license_pool():
                # Create a client
                client = commerce_consumer_procurement_v1.LicenseManagementServiceClient()

                # Initialize request argument(s)
                request = commerce_consumer_procurement_v1.UpdateLicensePoolRequest(
                )

                # Make the request
                response = client.update_license_pool(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.commerce_consumer_procurement_v1.types.UpdateLicensePoolRequest, dict]):
                The request object. Request message for updating a
                license pool.
            license_pool (google.cloud.commerce_consumer_procurement_v1.types.LicensePool):
                Required. The license pool to update.

                The license pool's name field is used to identify the
                license pool to update. Format:
                ``billingAccounts/{billing_account}/orders/{order}/licensePool``.

                This corresponds to the ``license_pool`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            update_mask (google.protobuf.field_mask_pb2.FieldMask):
                Required. The list of fields to
                update.

                This corresponds to the ``update_mask`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.commerce_consumer_procurement_v1.types.LicensePool:
                A license pool represents a pool of
                licenses that can be assigned to users.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([license_pool, update_mask])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        # there are no flattened fields), or create one.
        if not isinstance(request, license_management_service.UpdateLicensePoolRequest):
            request = license_management_service.UpdateLicensePoolRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if license_pool is not None:
            request.license_pool = license_pool
        if update_mask is not None:
            request.update_mask = update_mask

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.update_license_pool]

        # Certain fields should be provided within the metadata header;
        # add these here.
        # The routing key is the *nested* resource name (license_pool.name),
        # since UpdateLicensePoolRequest has no top-level name field.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("license_pool.name", request.license_pool.name),)
            ),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def assign(
        self,
        request: Optional[Union[license_management_service.AssignRequest, dict]] = None,
        *,
        parent: Optional[str] = None,
        usernames: Optional[MutableSequence[str]] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> license_management_service.AssignResponse:
        r"""Assigns a license to a user.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import commerce_consumer_procurement_v1

            def sample_assign():
                # Create a client
                client = commerce_consumer_procurement_v1.LicenseManagementServiceClient()

                # Initialize request argument(s)
                request = commerce_consumer_procurement_v1.AssignRequest(
                    parent="parent_value",
                    usernames=['usernames_value1', 'usernames_value2'],
                )

                # Make the request
                response = client.assign(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.commerce_consumer_procurement_v1.types.AssignRequest, dict]):
                The request object. Request message for
                [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign].
            parent (str):
                Required. License pool name.
                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            usernames (MutableSequence[str]):
                Required. Username. Format: ``name@domain.com``.
                This corresponds to the ``usernames`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.commerce_consumer_procurement_v1.types.AssignResponse:
                Response message for
                [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign].

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent, usernames])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        # there are no flattened fields), or create one.
        if not isinstance(request, license_management_service.AssignRequest):
            request = license_management_service.AssignRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent
        if usernames is not None:
            request.usernames = usernames

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.assign]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def unassign(
        self,
        request: Optional[
            Union[license_management_service.UnassignRequest, dict]
        ] = None,
        *,
        parent: Optional[str] = None,
        usernames: Optional[MutableSequence[str]] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> license_management_service.UnassignResponse:
        r"""Unassigns a license from a user.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import commerce_consumer_procurement_v1

            def sample_unassign():
                # Create a client
                client = commerce_consumer_procurement_v1.LicenseManagementServiceClient()

                # Initialize request argument(s)
                request = commerce_consumer_procurement_v1.UnassignRequest(
                    parent="parent_value",
                    usernames=['usernames_value1', 'usernames_value2'],
                )

                # Make the request
                response = client.unassign(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.commerce_consumer_procurement_v1.types.UnassignRequest, dict]):
                The request object. Request message for
                [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign].
            parent (str):
                Required. License pool name.
                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            usernames (MutableSequence[str]):
                Required. Username. Format: ``name@domain.com``.
                This corresponds to the ``usernames`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.commerce_consumer_procurement_v1.types.UnassignResponse:
                Response message for
                [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign].

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent, usernames])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        # there are no flattened fields), or create one.
        if not isinstance(request, license_management_service.UnassignRequest):
            request = license_management_service.UnassignRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent
        if usernames is not None:
            request.usernames = usernames

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.unassign]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def enumerate_licensed_users(
        self,
        request: Optional[
            Union[license_management_service.EnumerateLicensedUsersRequest, dict]
        ] = None,
        *,
        parent: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> pagers.EnumerateLicensedUsersPager:
        r"""Enumerates all users assigned a license.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import commerce_consumer_procurement_v1

            def sample_enumerate_licensed_users():
                # Create a client
                client = commerce_consumer_procurement_v1.LicenseManagementServiceClient()

                # Initialize request argument(s)
                request = commerce_consumer_procurement_v1.EnumerateLicensedUsersRequest(
                    parent="parent_value",
                )

                # Make the request
                page_result = client.enumerate_licensed_users(request=request)

                # Handle the response
                for response in page_result:
                    print(response)

        Args:
            request (Union[google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersRequest, dict]):
                The request object. Request message for
                [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers].
            parent (str):
                Required. License pool name.
                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.commerce_consumer_procurement_v1.services.license_management_service.pagers.EnumerateLicensedUsersPager:
                Response message for
                [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers].

                Iterating over this object will yield results and
                resolve additional pages automatically.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        # there are no flattened fields), or create one.
        if not isinstance(
            request, license_management_service.EnumerateLicensedUsersRequest
        ):
            request = license_management_service.EnumerateLicensedUsersRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.enumerate_licensed_users]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method.
        response = pagers.EnumerateLicensedUsersPager(
            method=rpc,
            request=request,
            response=response,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def __enter__(self) -> "LicenseManagementServiceClient":
        return self

    def __exit__(self, type, value, traceback):
        """Releases underlying transport's resources.

        .. warning::
            ONLY use as a context manager if the transport is NOT shared
            with other clients! Exiting the with block will CLOSE the transport
            and may cause errors in other clients!
        """
        self.transport.close()

    def get_operation(
        self,
        request: Optional[operations_pb2.GetOperationRequest] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operations_pb2.Operation:
        r"""Gets the latest state of a long-running operation.

        Args:
            request (:class:`~.operations_pb2.GetOperationRequest`):
                The request object. Request message for
                `GetOperation` method.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            ~.operations_pb2.Operation:
                An ``Operation`` object.
        """
        # Create or coerce a protobuf request object.
        # The request isn't a proto-plus wrapped type,
        # so it must be constructed via keyword expansion.
        if isinstance(request, dict):
            request = operations_pb2.GetOperationRequest(**request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        # Unlike the service RPCs above, this mixin method is wrapped ad hoc
        # here rather than precomputed in the transport's _wrapped_methods.
        rpc = gapic_v1.method.wrap_method(
            self._transport.get_operation,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response


DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__
)


__all__ = ("LicenseManagementServiceClient",)
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import (
    Any,
    AsyncIterator,
    Awaitable,
    Callable,
    Iterator,
    Optional,
    Sequence,
    Tuple,
    Union,
)

from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.api_core import retry_async as retries_async

# Older google-api-core releases lack `_MethodDefault`; fall back to plain
# `object` sentinels so the aliases stay importable either way.
try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
    OptionalAsyncRetry = Union[
        retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None
    ]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
    OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None]  # type: ignore

from google.cloud.commerce_consumer_procurement_v1.types import (
    license_management_service,
)


class EnumerateLicensedUsersPager:
    """A pager for iterating through ``enumerate_licensed_users`` requests.

    This class thinly wraps an initial
    :class:`google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersResponse` object, and
    provides an ``__iter__`` method to iterate through its
    ``licensed_users`` field.

    If there are more pages, the ``__iter__`` method will make additional
    ``EnumerateLicensedUsers`` requests and continue to iterate
    through the ``licensed_users`` field on the
    corresponding responses.

    All the usual :class:`google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersResponse`
    attributes are available on the pager. If multiple requests are made, only
    the most recent response is retained, and thus used for attribute lookup.
    """

    def __init__(
        self,
        method: Callable[
            ..., license_management_service.EnumerateLicensedUsersResponse
        ],
        request: license_management_service.EnumerateLicensedUsersRequest,
        response: license_management_service.EnumerateLicensedUsersResponse,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersRequest):
                The initial request object.
            response (google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersResponse):
                The initial response object.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Re-wrap the request so mutating `page_token` below never touches
        # the caller's object.
        self._request = license_management_service.EnumerateLicensedUsersRequest(
            request
        )
        self._response = response
        self._retry = retry
        self._timeout = timeout
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    def pages(
        self,
    ) -> Iterator[license_management_service.EnumerateLicensedUsersResponse]:
        # Yield the current page, then keep fetching while the service
        # reports a next_page_token.
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(
                self._request,
                retry=self._retry,
                timeout=self._timeout,
                metadata=self._metadata,
            )
            yield self._response

    def __iter__(self) -> Iterator[license_management_service.LicensedUser]:
        for page in self.pages:
            yield from page.licensed_users

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)


class EnumerateLicensedUsersAsyncPager:
    """A pager for iterating through ``enumerate_licensed_users`` requests.

    This class thinly wraps an initial
    :class:`google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersResponse` object, and
    provides an ``__aiter__`` method to iterate through its
    ``licensed_users`` field.

    If there are more pages, the ``__aiter__`` method will make additional
    ``EnumerateLicensedUsers`` requests and continue to iterate
    through the ``licensed_users`` field on the
    corresponding responses.

    All the usual :class:`google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersResponse`
    attributes are available on the pager. If multiple requests are made, only
    the most recent response is retained, and thus used for attribute lookup.
    """

    def __init__(
        self,
        method: Callable[
            ..., Awaitable[license_management_service.EnumerateLicensedUsersResponse]
        ],
        request: license_management_service.EnumerateLicensedUsersRequest,
        response: license_management_service.EnumerateLicensedUsersResponse,
        *,
        retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiates the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersRequest):
                The initial request object.
            response (google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersResponse):
                The initial response object.
            retry (google.api_core.retry.AsyncRetry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Re-wrap the request so mutating `page_token` below never touches
        # the caller's object.
        self._request = license_management_service.EnumerateLicensedUsersRequest(
            request
        )
        self._response = response
        self._retry = retry
        self._timeout = timeout
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    async def pages(
        self,
    ) -> AsyncIterator[license_management_service.EnumerateLicensedUsersResponse]:
        # Async analogue of the sync pager: await each subsequent page fetch.
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = await self._method(
                self._request,
                retry=self._retry,
                timeout=self._timeout,
                metadata=self._metadata,
            )
            yield self._response

    def __aiter__(self) -> AsyncIterator[license_management_service.LicensedUser]:
        async def async_generator():
            async for page in self.pages:
                for response in page.licensed_users:
                    yield response

        return async_generator()

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import LicenseManagementServiceTransport
from .grpc import LicenseManagementServiceGrpcTransport
from .grpc_asyncio import LicenseManagementServiceGrpcAsyncIOTransport
from .rest import (
    LicenseManagementServiceRestInterceptor,
    LicenseManagementServiceRestTransport,
)

# Compile a registry of transports.
# Maps the `transport` string accepted by the client constructor to the
# concrete transport class implementing it.
_transport_registry = (
    OrderedDict()
)  # type: Dict[str, Type[LicenseManagementServiceTransport]]
_transport_registry["grpc"] = LicenseManagementServiceGrpcTransport
_transport_registry["grpc_asyncio"] = LicenseManagementServiceGrpcAsyncIOTransport
_transport_registry["rest"] = LicenseManagementServiceRestTransport

__all__ = (
    "LicenseManagementServiceTransport",
    "LicenseManagementServiceGrpcTransport",
    "LicenseManagementServiceGrpcAsyncIOTransport",
    "LicenseManagementServiceRestTransport",
    "LicenseManagementServiceRestInterceptor",
)
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union

import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
import google.auth  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.commerce_consumer_procurement_v1 import (
    gapic_version as package_version,
)
from google.cloud.commerce_consumer_procurement_v1.types import (
    license_management_service,
)

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__
)


class LicenseManagementServiceTransport(abc.ABC):
    """Abstract transport class for LicenseManagementService.

    Concrete subclasses (gRPC, gRPC-asyncio, REST) provide the actual
    wire protocol; this base class handles credential resolution and
    per-method retry/timeout wrapping.
    """

    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)

    DEFAULT_HOST: str = "cloudcommerceconsumerprocurement.googleapis.com"

    def __init__(
        self,
        *,
        host: str = DEFAULT_HOST,
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
        **kwargs,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to (default: 'cloudcommerceconsumerprocurement.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
        """

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes
        # A subclass may set _ignore_credentials before calling super().__init__;
        # default it here if it hasn't.
        if not hasattr(self, "_ignore_credentials"):
            self._ignore_credentials: bool = False

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive"
            )

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
            )
        elif credentials is None and not self._ignore_credentials:
            credentials, _ = google.auth.default(
                **scopes_kwargs, quota_project_id=quota_project_id
            )
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(
                    api_audience if api_audience else host
                )

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if (
            always_use_jwt_access
            and isinstance(credentials, service_account.Credentials)
            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
        ):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ":" not in host:
            host += ":443"
        self._host = host

    @property
    def host(self):
        # Hostname (with port) the transport targets.
        return self._host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods.
        # Each service RPC is wrapped once with retry/timeout/user-agent
        # handling; clients look these up via _wrapped_methods.
        self._wrapped_methods = {
            self.get_license_pool: gapic_v1.method.wrap_method(
                self.get_license_pool,
                default_timeout=None,
                client_info=client_info,
            ),
            self.update_license_pool: gapic_v1.method.wrap_method(
                self.update_license_pool,
                default_timeout=None,
                client_info=client_info,
            ),
            self.assign: gapic_v1.method.wrap_method(
                self.assign,
                default_timeout=None,
                client_info=client_info,
            ),
            self.unassign: gapic_v1.method.wrap_method(
                self.unassign,
                default_timeout=None,
                client_info=client_info,
            ),
            self.enumerate_licensed_users: gapic_v1.method.wrap_method(
                self.enumerate_licensed_users,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def get_license_pool(
        self,
    ) -> Callable[
        [license_management_service.GetLicensePoolRequest],
        Union[
            license_management_service.LicensePool,
            Awaitable[license_management_service.LicensePool],
        ],
    ]:
        raise NotImplementedError()

    @property
    def update_license_pool(
        self,
    ) -> Callable[
        [license_management_service.UpdateLicensePoolRequest],
        Union[
            license_management_service.LicensePool,
            Awaitable[license_management_service.LicensePool],
        ],
    ]:
        raise NotImplementedError()

    @property
    def assign(
        self,
    ) -> Callable[
        [license_management_service.AssignRequest],
        Union[
            license_management_service.AssignResponse,
            Awaitable[license_management_service.AssignResponse],
        ],
    ]:
        raise NotImplementedError()

    @property
    def unassign(
        self,
    ) -> Callable[
        [license_management_service.UnassignRequest],
        Union[
            license_management_service.UnassignResponse,
            Awaitable[license_management_service.UnassignResponse],
        ],
    ]:
        raise NotImplementedError()

    @property
    def enumerate_licensed_users(
        self,
    ) -> Callable[
        [license_management_service.EnumerateLicensedUsersRequest],
        Union[
            license_management_service.EnumerateLicensedUsersResponse,
            Awaitable[license_management_service.EnumerateLicensedUsersResponse],
        ],
    ]:
        raise NotImplementedError()

    @property
    def get_operation(
        self,
    ) -> Callable[
        [operations_pb2.GetOperationRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        # Short transport identifier, e.g. "grpc" or "rest"; supplied by subclasses.
        raise NotImplementedError()


__all__ = ("LicenseManagementServiceTransport",)
#
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
import warnings

from google.api_core import gapic_v1, grpc_helpers
import google.auth  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
import grpc  # type: ignore

from google.cloud.commerce_consumer_procurement_v1.types import (
    license_management_service,
)

from .base import DEFAULT_CLIENT_INFO, LicenseManagementServiceTransport


class LicenseManagementServiceGrpcTransport(LicenseManagementServiceTransport):
    """gRPC backend transport for LicenseManagementService.

    Service for managing licenses.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """

    # Cache of instantiated RPC stubs, keyed by fully-qualified method path.
    # Populated lazily by the RPC properties below.
    _stubs: Dict[str, Callable]

    def __init__(
        self,
        *,
        host: str = "cloudcommerceconsumerprocurement.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
        api_mtls_endpoint: Optional[str] = None,
        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to (default: 'cloudcommerceconsumerprocurement.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if a ``channel`` instance is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if a ``channel`` instance is provided.
            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
                ignored if a ``channel`` instance is provided.
            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
                A ``Channel`` instance through which to make calls, or a Callable
                that constructs and returns one. If set to None, ``self.create_channel``
                is used to create the channel. If a Callable is given, it will be called
                with the same arguments as used in ``self.create_channel``.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if a ``channel`` instance is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
              creation failed for any reason.
          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
              and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if isinstance(channel, grpc.Channel):
            # Ignore credentials if a channel was passed.
            credentials = None
            self._ignore_credentials = True
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None

        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience,
        )

        if not self._grpc_channel:
            # initialize with the provided callable or the default channel
            channel_init = channel or type(self).create_channel
            self._grpc_channel = channel_init(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @classmethod
    def create_channel(
        cls,
        host: str = "cloudcommerceconsumerprocurement.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        **kwargs,
    ) -> grpc.Channel:
        """Create and return a gRPC channel object.
        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            grpc.Channel: A gRPC channel object.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """

        return grpc_helpers.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs,
        )

    @property
    def grpc_channel(self) -> grpc.Channel:
        """Return the channel designed to connect to this service."""
        return self._grpc_channel

    @property
    def get_license_pool(
        self,
    ) -> Callable[
        [license_management_service.GetLicensePoolRequest],
        license_management_service.LicensePool,
    ]:
        r"""Return a callable for the get license pool method over gRPC.

        Gets the license pool.

        Returns:
            Callable[[~.GetLicensePoolRequest],
                    ~.LicensePool]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_license_pool" not in self._stubs:
            self._stubs["get_license_pool"] = self.grpc_channel.unary_unary(
                "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/GetLicensePool",
                request_serializer=license_management_service.GetLicensePoolRequest.serialize,
                response_deserializer=license_management_service.LicensePool.deserialize,
            )
        return self._stubs["get_license_pool"]

    @property
    def update_license_pool(
        self,
    ) -> Callable[
        [license_management_service.UpdateLicensePoolRequest],
        license_management_service.LicensePool,
    ]:
        r"""Return a callable for the update license pool method over gRPC.

        Updates the license pool if one exists for this
        Order.

        Returns:
            Callable[[~.UpdateLicensePoolRequest],
                    ~.LicensePool]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "update_license_pool" not in self._stubs:
            self._stubs["update_license_pool"] = self.grpc_channel.unary_unary(
                "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/UpdateLicensePool",
                request_serializer=license_management_service.UpdateLicensePoolRequest.serialize,
                response_deserializer=license_management_service.LicensePool.deserialize,
            )
        return self._stubs["update_license_pool"]

    @property
    def assign(
        self,
    ) -> Callable[
        [license_management_service.AssignRequest],
        license_management_service.AssignResponse,
    ]:
        r"""Return a callable for the assign method over gRPC.

        Assigns a license to a user.

        Returns:
            Callable[[~.AssignRequest],
                    ~.AssignResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "assign" not in self._stubs:
            self._stubs["assign"] = self.grpc_channel.unary_unary(
                "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/Assign",
                request_serializer=license_management_service.AssignRequest.serialize,
                response_deserializer=license_management_service.AssignResponse.deserialize,
            )
        return self._stubs["assign"]

    @property
    def unassign(
        self,
    ) -> Callable[
        [license_management_service.UnassignRequest],
        license_management_service.UnassignResponse,
    ]:
        r"""Return a callable for the unassign method over gRPC.

        Unassigns a license from a user.

        Returns:
            Callable[[~.UnassignRequest],
                    ~.UnassignResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "unassign" not in self._stubs:
            self._stubs["unassign"] = self.grpc_channel.unary_unary(
                "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/Unassign",
                request_serializer=license_management_service.UnassignRequest.serialize,
                response_deserializer=license_management_service.UnassignResponse.deserialize,
            )
        return self._stubs["unassign"]

    @property
    def enumerate_licensed_users(
        self,
    ) -> Callable[
        [license_management_service.EnumerateLicensedUsersRequest],
        license_management_service.EnumerateLicensedUsersResponse,
    ]:
        r"""Return a callable for the enumerate licensed users method over gRPC.

        Enumerates all users assigned a license.

        Returns:
            Callable[[~.EnumerateLicensedUsersRequest],
                    ~.EnumerateLicensedUsersResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "enumerate_licensed_users" not in self._stubs:
            self._stubs["enumerate_licensed_users"] = self.grpc_channel.unary_unary(
                "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/EnumerateLicensedUsers",
                request_serializer=license_management_service.EnumerateLicensedUsersRequest.serialize,
                response_deserializer=license_management_service.EnumerateLicensedUsersResponse.deserialize,
            )
        return self._stubs["enumerate_licensed_users"]

    def close(self):
        """Close the underlying gRPC channel and release its resources."""
        self.grpc_channel.close()

    @property
    def get_operation(
        self,
    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
        r"""Return a callable for the get_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_operation" not in self._stubs:
            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/GetOperation",
                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["get_operation"]

    @property
    def kind(self) -> str:
        # Identifier used by the client factory to select this transport.
        return "grpc"


__all__ = ("LicenseManagementServiceGrpcTransport",)

# NOTE(review): the mangled patch continues here with the header of the next
# new file in the diff:
#   .../license_management_service/transports/grpc_asyncio.py
# Its license header (preserved from the original patch payload) follows.
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
import warnings

from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1, grpc_helpers_async
from google.api_core import retry_async as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
import grpc  # type: ignore
from grpc.experimental import aio  # type: ignore

from google.cloud.commerce_consumer_procurement_v1.types import (
    license_management_service,
)

from .base import DEFAULT_CLIENT_INFO, LicenseManagementServiceTransport
from .grpc import LicenseManagementServiceGrpcTransport


class LicenseManagementServiceGrpcAsyncIOTransport(LicenseManagementServiceTransport):
    """gRPC AsyncIO backend transport for LicenseManagementService.

    Service for managing licenses.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """

    # The channel is created in __init__; annotated here for type checkers.
    _grpc_channel: aio.Channel
    # NOTE(review): class-level mutable default; it is shadowed by the
    # per-instance ``self._stubs = {}`` assignment in __init__, so instances
    # never share this dict in practice.
    _stubs: Dict[str, Callable] = {}

    @classmethod
    def create_channel(
        cls,
        host: str = "cloudcommerceconsumerprocurement.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        **kwargs,
    ) -> aio.Channel:
        """Create and return a gRPC AsyncIO channel object.
        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            aio.Channel: A gRPC AsyncIO channel object.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """

        return grpc_helpers_async.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs,
        )

    def __init__(
        self,
        *,
        host: str = "cloudcommerceconsumerprocurement.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
        api_mtls_endpoint: Optional[str] = None,
        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to (default: 'cloudcommerceconsumerprocurement.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if a ``channel`` instance is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if a ``channel`` instance is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
                A ``Channel`` instance through which to make calls, or a Callable
                that constructs and returns one. If set to None, ``self.create_channel``
                is used to create the channel. If a Callable is given, it will be called
                with the same arguments as used in ``self.create_channel``.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if a ``channel`` instance is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
              creation failed for any reason.
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
              and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if isinstance(channel, aio.Channel):
            # Ignore credentials if a channel was passed.
            credentials = None
            self._ignore_credentials = True
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None
        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience,
        )

        if not self._grpc_channel:
            # initialize with the provided callable or the default channel
            channel_init = channel or type(self).create_channel
            self._grpc_channel = channel_init(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @property
    def grpc_channel(self) -> aio.Channel:
        """Create the channel designed to connect to this service.

        This property caches on the instance; repeated calls return
        the same channel.
        """
        # Return the channel from cache.
        return self._grpc_channel

    @property
    def get_license_pool(
        self,
    ) -> Callable[
        [license_management_service.GetLicensePoolRequest],
        Awaitable[license_management_service.LicensePool],
    ]:
        r"""Return a callable for the get license pool method over gRPC.

        Gets the license pool.

        Returns:
            Callable[[~.GetLicensePoolRequest],
                    Awaitable[~.LicensePool]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_license_pool" not in self._stubs:
            self._stubs["get_license_pool"] = self.grpc_channel.unary_unary(
                "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/GetLicensePool",
                request_serializer=license_management_service.GetLicensePoolRequest.serialize,
                response_deserializer=license_management_service.LicensePool.deserialize,
            )
        return self._stubs["get_license_pool"]

    @property
    def update_license_pool(
        self,
    ) -> Callable[
        [license_management_service.UpdateLicensePoolRequest],
        Awaitable[license_management_service.LicensePool],
    ]:
        r"""Return a callable for the update license pool method over gRPC.

        Updates the license pool if one exists for this
        Order.

        Returns:
            Callable[[~.UpdateLicensePoolRequest],
                    Awaitable[~.LicensePool]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "update_license_pool" not in self._stubs:
            self._stubs["update_license_pool"] = self.grpc_channel.unary_unary(
                "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/UpdateLicensePool",
                request_serializer=license_management_service.UpdateLicensePoolRequest.serialize,
                response_deserializer=license_management_service.LicensePool.deserialize,
            )
        return self._stubs["update_license_pool"]

    @property
    def assign(
        self,
    ) -> Callable[
        [license_management_service.AssignRequest],
        Awaitable[license_management_service.AssignResponse],
    ]:
        r"""Return a callable for the assign method over gRPC.

        Assigns a license to a user.

        Returns:
            Callable[[~.AssignRequest],
                    Awaitable[~.AssignResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "assign" not in self._stubs:
            self._stubs["assign"] = self.grpc_channel.unary_unary(
                "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/Assign",
                request_serializer=license_management_service.AssignRequest.serialize,
                response_deserializer=license_management_service.AssignResponse.deserialize,
            )
        return self._stubs["assign"]

    @property
    def unassign(
        self,
    ) -> Callable[
        [license_management_service.UnassignRequest],
        Awaitable[license_management_service.UnassignResponse],
    ]:
        r"""Return a callable for the unassign method over gRPC.

        Unassigns a license from a user.

        Returns:
            Callable[[~.UnassignRequest],
                    Awaitable[~.UnassignResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "unassign" not in self._stubs:
            self._stubs["unassign"] = self.grpc_channel.unary_unary(
                "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/Unassign",
                request_serializer=license_management_service.UnassignRequest.serialize,
                response_deserializer=license_management_service.UnassignResponse.deserialize,
            )
        return self._stubs["unassign"]

    @property
    def enumerate_licensed_users(
        self,
    ) -> Callable[
        [license_management_service.EnumerateLicensedUsersRequest],
        Awaitable[license_management_service.EnumerateLicensedUsersResponse],
    ]:
        r"""Return a callable for the enumerate licensed users method over gRPC.

        Enumerates all users assigned a license.

        Returns:
            Callable[[~.EnumerateLicensedUsersRequest],
                    Awaitable[~.EnumerateLicensedUsersResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "enumerate_licensed_users" not in self._stubs:
            self._stubs["enumerate_licensed_users"] = self.grpc_channel.unary_unary(
                "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/EnumerateLicensedUsers",
                request_serializer=license_management_service.EnumerateLicensedUsersRequest.serialize,
                response_deserializer=license_management_service.EnumerateLicensedUsersResponse.deserialize,
            )
        return self._stubs["enumerate_licensed_users"]

    def _prep_wrapped_messages(self, client_info):
        """Precompute the wrapped methods, overriding the base class method to use async wrappers."""
        self._wrapped_methods = {
            self.get_license_pool: gapic_v1.method_async.wrap_method(
                self.get_license_pool,
                default_timeout=None,
                client_info=client_info,
            ),
            self.update_license_pool: gapic_v1.method_async.wrap_method(
                self.update_license_pool,
                default_timeout=None,
                client_info=client_info,
            ),
            self.assign: gapic_v1.method_async.wrap_method(
                self.assign,
                default_timeout=None,
                client_info=client_info,
            ),
            self.unassign: gapic_v1.method_async.wrap_method(
                self.unassign,
                default_timeout=None,
                client_info=client_info,
            ),
            self.enumerate_licensed_users: gapic_v1.method_async.wrap_method(
                self.enumerate_licensed_users,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Close the underlying channel; returns the coroutine to await."""
        return self.grpc_channel.close()

    @property
    def get_operation(
        self,
    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
        r"""Return a callable for the get_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_operation" not in self._stubs:
            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/GetOperation",
                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["get_operation"]


__all__ = ("LicenseManagementServiceGrpcAsyncIOTransport",)

# NOTE(review): the mangled patch continues here with the header of the next
# new file in the diff:
#   .../license_management_service/transports/rest.py
# Its license header (preserved from the original patch payload) follows.
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import LicenseManagementServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class LicenseManagementServiceRestInterceptor: + """Interceptor for LicenseManagementService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the LicenseManagementServiceRestTransport. + + .. 
code-block:: python + class MyCustomLicenseManagementServiceInterceptor(LicenseManagementServiceRestInterceptor): + def pre_assign(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_assign(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_enumerate_licensed_users(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_enumerate_licensed_users(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_license_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_license_pool(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_unassign(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_unassign(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_license_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_license_pool(self, response): + logging.log(f"Received response: {response}") + return response + + transport = LicenseManagementServiceRestTransport(interceptor=MyCustomLicenseManagementServiceInterceptor()) + client = LicenseManagementServiceClient(transport=transport) + + + """ + + def pre_assign( + self, + request: license_management_service.AssignRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[license_management_service.AssignRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for assign + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseManagementService server. 
+ """ + return request, metadata + + def post_assign( + self, response: license_management_service.AssignResponse + ) -> license_management_service.AssignResponse: + """Post-rpc interceptor for assign + + Override in a subclass to manipulate the response + after it is returned by the LicenseManagementService server but before + it is returned to user code. + """ + return response + + def pre_enumerate_licensed_users( + self, + request: license_management_service.EnumerateLicensedUsersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + license_management_service.EnumerateLicensedUsersRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for enumerate_licensed_users + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseManagementService server. + """ + return request, metadata + + def post_enumerate_licensed_users( + self, response: license_management_service.EnumerateLicensedUsersResponse + ) -> license_management_service.EnumerateLicensedUsersResponse: + """Post-rpc interceptor for enumerate_licensed_users + + Override in a subclass to manipulate the response + after it is returned by the LicenseManagementService server but before + it is returned to user code. + """ + return response + + def pre_get_license_pool( + self, + request: license_management_service.GetLicensePoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + license_management_service.GetLicensePoolRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_license_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseManagementService server. 
+ """ + return request, metadata + + def post_get_license_pool( + self, response: license_management_service.LicensePool + ) -> license_management_service.LicensePool: + """Post-rpc interceptor for get_license_pool + + Override in a subclass to manipulate the response + after it is returned by the LicenseManagementService server but before + it is returned to user code. + """ + return response + + def pre_unassign( + self, + request: license_management_service.UnassignRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[license_management_service.UnassignRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for unassign + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseManagementService server. + """ + return request, metadata + + def post_unassign( + self, response: license_management_service.UnassignResponse + ) -> license_management_service.UnassignResponse: + """Post-rpc interceptor for unassign + + Override in a subclass to manipulate the response + after it is returned by the LicenseManagementService server but before + it is returned to user code. + """ + return response + + def pre_update_license_pool( + self, + request: license_management_service.UpdateLicensePoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + license_management_service.UpdateLicensePoolRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_license_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseManagementService server. + """ + return request, metadata + + def post_update_license_pool( + self, response: license_management_service.LicensePool + ) -> license_management_service.LicensePool: + """Post-rpc interceptor for update_license_pool + + Override in a subclass to manipulate the response + after it is returned by the LicenseManagementService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseManagementService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the LicenseManagementService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class LicenseManagementServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: LicenseManagementServiceRestInterceptor + + +class LicenseManagementServiceRestTransport(LicenseManagementServiceTransport): + """REST backend transport for LicenseManagementService. + + Service for managing licenses. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "cloudcommerceconsumerprocurement.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[LicenseManagementServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudcommerceconsumerprocurement.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or LicenseManagementServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _Assign(LicenseManagementServiceRestStub):
+        def __hash__(self):
+            return hash("Assign")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: license_management_service.AssignRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> license_management_service.AssignResponse:
+            r"""Call the assign method over HTTP.
+
+            Args:
+                request (~.license_management_service.AssignRequest):
+                    The request object. Request message for
+                    [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.license_management_service.AssignResponse: + Response message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=billingAccounts/*/orders/*/licensePool}:assign", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_assign(request, metadata) + pb_request = license_management_service.AssignRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = license_management_service.AssignResponse() + pb_resp = license_management_service.AssignResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_assign(resp) + return resp + + class _EnumerateLicensedUsers(LicenseManagementServiceRestStub): + def __hash__(self): + return hash("EnumerateLicensedUsers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: license_management_service.EnumerateLicensedUsersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.EnumerateLicensedUsersResponse: + r"""Call the enumerate licensed users method over HTTP. + + Args: + request (~.license_management_service.EnumerateLicensedUsersRequest): + The request object. Request message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.license_management_service.EnumerateLicensedUsersResponse: + Response message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=billingAccounts/*/orders/*/licensePool}:enumerateLicensedUsers", + }, + ] + request, metadata = self._interceptor.pre_enumerate_licensed_users( + request, metadata + ) + pb_request = license_management_service.EnumerateLicensedUsersRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = license_management_service.EnumerateLicensedUsersResponse() + pb_resp = license_management_service.EnumerateLicensedUsersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_enumerate_licensed_users(resp) + return resp + + class _GetLicensePool(LicenseManagementServiceRestStub): + def __hash__(self): + return hash("GetLicensePool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: license_management_service.GetLicensePoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.LicensePool: + r"""Call the get license pool method over HTTP. + + Args: + request (~.license_management_service.GetLicensePoolRequest): + The request object. Request message for getting a license + pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.license_management_service.LicensePool: + A license pool represents a pool of + licenses that can be assigned to users. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=billingAccounts/*/orders/*/licensePool}", + }, + ] + request, metadata = self._interceptor.pre_get_license_pool( + request, metadata + ) + pb_request = license_management_service.GetLicensePoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = license_management_service.LicensePool() + pb_resp = license_management_service.LicensePool.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_license_pool(resp) + return resp + + class _Unassign(LicenseManagementServiceRestStub): + def __hash__(self): + return hash("Unassign") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: license_management_service.UnassignRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.UnassignResponse: + r"""Call the unassign method over HTTP. + + Args: + request (~.license_management_service.UnassignRequest): + The request object. Request message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.license_management_service.UnassignResponse: + Response message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=billingAccounts/*/orders/*/licensePool}:unassign", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_unassign(request, metadata) + pb_request = license_management_service.UnassignRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = license_management_service.UnassignResponse() + pb_resp = license_management_service.UnassignResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_unassign(resp) + return resp + + class _UpdateLicensePool(LicenseManagementServiceRestStub): + def __hash__(self): + return hash("UpdateLicensePool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: license_management_service.UpdateLicensePoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.LicensePool: + r"""Call the update license pool method over HTTP. + + Args: + request (~.license_management_service.UpdateLicensePoolRequest): + The request object. Request message for updating a + license pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.license_management_service.LicensePool: + A license pool represents a pool of + licenses that can be assigned to users. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{license_pool.name=billingAccounts/*/orders/*/licensePool/*}", + "body": "license_pool", + }, + ] + request, metadata = self._interceptor.pre_update_license_pool( + request, metadata + ) + pb_request = license_management_service.UpdateLicensePoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = license_management_service.LicensePool() + pb_resp = license_management_service.LicensePool.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_license_pool(resp) + return resp + + @property + def assign( + self, + ) -> Callable[ + [license_management_service.AssignRequest], + license_management_service.AssignResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Assign(self._session, self._host, self._interceptor) # type: ignore + + @property + def enumerate_licensed_users( + self, + ) -> Callable[ + [license_management_service.EnumerateLicensedUsersRequest], + license_management_service.EnumerateLicensedUsersResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EnumerateLicensedUsers(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_license_pool( + self, + ) -> Callable[ + [license_management_service.GetLicensePoolRequest], + license_management_service.LicensePool, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetLicensePool(self._session, self._host, self._interceptor) # type: ignore + + @property + def unassign( + self, + ) -> Callable[ + [license_management_service.UnassignRequest], + license_management_service.UnassignResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Unassign(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_license_pool( + self, + ) -> Callable[ + [license_management_service.UpdateLicensePoolRequest], + license_management_service.LicensePool, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateLicensePool(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(LicenseManagementServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=billingAccounts/*/orders/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("LicenseManagementServiceRestTransport",) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/__init__.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/__init__.py index cc4943418819..4e35259c31e1 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/__init__.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/__init__.py @@ -13,6 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from .license_management_service import ( + AssignmentProtocol, + AssignRequest, + AssignResponse, + EnumerateLicensedUsersRequest, + EnumerateLicensedUsersResponse, + GetLicensePoolRequest, + LicensedUser, + LicensePool, + UnassignRequest, + UnassignResponse, + UpdateLicensePoolRequest, +) from .order import ( LineItem, LineItemChange, @@ -25,14 +38,30 @@ Subscription, ) from .procurement_service import ( + AutoRenewalBehavior, + CancelOrderMetadata, + CancelOrderRequest, GetOrderRequest, ListOrdersRequest, ListOrdersResponse, + ModifyOrderMetadata, + ModifyOrderRequest, PlaceOrderMetadata, PlaceOrderRequest, ) __all__ = ( + "AssignmentProtocol", + "AssignRequest", + "AssignResponse", + "EnumerateLicensedUsersRequest", + "EnumerateLicensedUsersResponse", + "GetLicensePoolRequest", + "LicensedUser", + "LicensePool", + "UnassignRequest", + "UnassignResponse", + "UpdateLicensePoolRequest", "LineItem", "LineItemChange", "LineItemInfo", @@ -42,9 +71,14 @@ "LineItemChangeState", "LineItemChangeStateReasonType", "LineItemChangeType", + "CancelOrderMetadata", + "CancelOrderRequest", "GetOrderRequest", "ListOrdersRequest", "ListOrdersResponse", + "ModifyOrderMetadata", + "ModifyOrderRequest", "PlaceOrderMetadata", "PlaceOrderRequest", + "AutoRenewalBehavior", ) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/license_management_service.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/license_management_service.py new file mode 100644 index 000000000000..4b13230f4ea8 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/license_management_service.py @@ -0,0 +1,331 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.commerce.consumer.procurement.v1", + manifest={ + "AssignmentProtocol", + "LicensePool", + "GetLicensePoolRequest", + "UpdateLicensePoolRequest", + "AssignRequest", + "AssignResponse", + "UnassignRequest", + "UnassignResponse", + "EnumerateLicensedUsersRequest", + "LicensedUser", + "EnumerateLicensedUsersResponse", + }, +) + + +class AssignmentProtocol(proto.Message): + r"""Assignment protocol for a license pool. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + manual_assignment_type (google.cloud.commerce_consumer_procurement_v1.types.AssignmentProtocol.ManualAssignmentType): + Allow manual assignments triggered by + administrative operations only. + + This field is a member of `oneof`_ ``assignment_type``. + auto_assignment_type (google.cloud.commerce_consumer_procurement_v1.types.AssignmentProtocol.AutoAssignmentType): + Allow automatic assignments triggered by data + plane operations. 
+ + This field is a member of `oneof`_ ``assignment_type``. + """ + + class ManualAssignmentType(proto.Message): + r"""Allow manual assignments triggered by administrative + operations only. + + """ + + class AutoAssignmentType(proto.Message): + r"""Configuration for automatic assignments handled by data plane + operations. + + Attributes: + inactive_license_ttl (google.protobuf.duration_pb2.Duration): + Optional. The time to live for an inactive + license. After this time has passed, the license + will be automatically unassigned from the user. + Must be at least 7 days, if set. If unset, the + license will never expire. + """ + + inactive_license_ttl: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + + manual_assignment_type: ManualAssignmentType = proto.Field( + proto.MESSAGE, + number=2, + oneof="assignment_type", + message=ManualAssignmentType, + ) + auto_assignment_type: AutoAssignmentType = proto.Field( + proto.MESSAGE, + number=3, + oneof="assignment_type", + message=AutoAssignmentType, + ) + + +class LicensePool(proto.Message): + r"""A license pool represents a pool of licenses that can be + assigned to users. + + Attributes: + name (str): + Identifier. Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool`` + license_assignment_protocol (google.cloud.commerce_consumer_procurement_v1.types.AssignmentProtocol): + Required. Assignment protocol for the license + pool. + available_license_count (int): + Output only. Licenses count that are + available to be assigned. + total_license_count (int): + Output only. Total number of licenses in the + pool. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + license_assignment_protocol: "AssignmentProtocol" = proto.Field( + proto.MESSAGE, + number=2, + message="AssignmentProtocol", + ) + available_license_count: int = proto.Field( + proto.INT32, + number=3, + ) + total_license_count: int = proto.Field( + proto.INT32, + number=4, + ) + + +class GetLicensePoolRequest(proto.Message): + r"""Request message for getting a license pool. + + Attributes: + name (str): + Required. The name of the license pool to get. Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateLicensePoolRequest(proto.Message): + r"""Request message for updating a license pool. + + Attributes: + license_pool (google.cloud.commerce_consumer_procurement_v1.types.LicensePool): + Required. The license pool to update. + + The license pool's name field is used to identify the + license pool to update. Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool``. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. + """ + + license_pool: "LicensePool" = proto.Field( + proto.MESSAGE, + number=1, + message="LicensePool", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class AssignRequest(proto.Message): + r"""Request message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + + Attributes: + parent (str): + Required. License pool name. + usernames (MutableSequence[str]): + Required. Username. Format: ``name@domain.com``. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + usernames: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class AssignResponse(proto.Message): + r"""Response message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + + """ + + +class UnassignRequest(proto.Message): + r"""Request message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + + Attributes: + parent (str): + Required. License pool name. + usernames (MutableSequence[str]): + Required. Username. Format: ``name@domain.com``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + usernames: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class UnassignResponse(proto.Message): + r"""Response message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + + """ + + +class EnumerateLicensedUsersRequest(proto.Message): + r"""Request message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + + Attributes: + parent (str): + Required. License pool name. + page_size (int): + Optional. The maximum number of users to + return. The service may return fewer than this + value. + page_token (str): + Optional. A page token, received from a previous + ``EnumerateLicensedUsers`` call. Provide this to retrieve + the subsequent page. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class LicensedUser(proto.Message): + r"""A licensed user. + + Attributes: + username (str): + Username. Format: ``name@domain.com``. 
+ assign_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the license was + assigned. + recent_usage_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the license was + recently used. This may not be the most recent + usage time, and will be updated regularly + (within 24 hours). + """ + + username: str = proto.Field( + proto.STRING, + number=1, + ) + assign_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + recent_usage_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class EnumerateLicensedUsersResponse(proto.Message): + r"""Response message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + + Attributes: + licensed_users (MutableSequence[google.cloud.commerce_consumer_procurement_v1.types.LicensedUser]): + The list of licensed users. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. 
+ """ + + @property + def raw_page(self): + return self + + licensed_users: MutableSequence["LicensedUser"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LicensedUser", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/order.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/order.py index 70bce4301daf..f22c139cd202 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/order.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/order.py @@ -68,7 +68,8 @@ class LineItemChangeState(proto.Enum): Sentinel value. Do not use. LINE_ITEM_CHANGE_STATE_PENDING_APPROVAL (1): Change is in this state when a change is - initiated and waiting for partner approval. + initiated and waiting for partner approval. This + state is only applicable for pending change. LINE_ITEM_CHANGE_STATE_APPROVED (2): Change is in this state after it's approved by the partner or auto-approved but before it @@ -76,21 +77,26 @@ class LineItemChangeState(proto.Enum): cancelled depending on the new line item info property (pending Private Offer change cannot be cancelled and can only be overwritten by another - Private Offer). + Private Offer). This state is only applicable + for pending change. LINE_ITEM_CHANGE_STATE_COMPLETED (3): Change is in this state after it's been - activated. + activated. This state is only applicable for + change in history. LINE_ITEM_CHANGE_STATE_REJECTED (4): Change is in this state if it was rejected by - the partner. + the partner. This state is only applicable for + change in history. LINE_ITEM_CHANGE_STATE_ABANDONED (5): Change is in this state if it was abandoned - by the user. 
+ by the user. This state is only applicable for + change in history. LINE_ITEM_CHANGE_STATE_ACTIVATING (6): Change is in this state if it's currently being provisioned downstream. The change can't be overwritten or cancelled when it's in this - state. + state. This state is only applicable for pending + change. """ LINE_ITEM_CHANGE_STATE_UNSPECIFIED = 0 LINE_ITEM_CHANGE_STATE_PENDING_APPROVAL = 1 diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/procurement_service.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/procurement_service.py index 3a9de30378b8..93c2b0b600ab 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/procurement_service.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/procurement_service.py @@ -24,15 +24,39 @@ __protobuf__ = proto.module( package="google.cloud.commerce.consumer.procurement.v1", manifest={ + "AutoRenewalBehavior", "PlaceOrderRequest", "PlaceOrderMetadata", "GetOrderRequest", "ListOrdersRequest", "ListOrdersResponse", + "ModifyOrderRequest", + "ModifyOrderMetadata", + "CancelOrderRequest", + "CancelOrderMetadata", }, ) +class AutoRenewalBehavior(proto.Enum): + r"""Indicates the auto renewal behavior customer specifies on + subscription. + + Values: + AUTO_RENEWAL_BEHAVIOR_UNSPECIFIED (0): + If unspecified, the auto renewal behavior + will follow the default config. + AUTO_RENEWAL_BEHAVIOR_ENABLE (1): + Auto Renewal will be enabled on subscription. + AUTO_RENEWAL_BEHAVIOR_DISABLE (2): + Auto Renewal will be disabled on + subscription. 
+ """ + AUTO_RENEWAL_BEHAVIOR_UNSPECIFIED = 0 + AUTO_RENEWAL_BEHAVIOR_ENABLE = 1 + AUTO_RENEWAL_BEHAVIOR_DISABLE = 2 + + class PlaceOrderRequest(proto.Message): r"""Request message for [ConsumerProcurementService.PlaceOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.PlaceOrder]. @@ -50,7 +74,7 @@ class PlaceOrderRequest(proto.Message): request_id (str): Optional. A unique identifier for this request. The server will ignore subsequent requests that provide a duplicate - request ID for at least 120 minutes after the first request. + request ID for at least 24 hours after the first request. The request ID must be a valid `UUID `__. @@ -176,4 +200,164 @@ def raw_page(self): ) +class ModifyOrderRequest(proto.Message): + r"""Request message for + [ConsumerProcurementService.ModifyOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder]. + + Attributes: + name (str): + Required. Name of the order to update. + modifications (MutableSequence[google.cloud.commerce_consumer_procurement_v1.types.ModifyOrderRequest.Modification]): + Optional. Modifications for an existing Order + created by an Offer. Required when Offer based + Order is being modified, except for when going + from an offer to a public plan. + display_name (str): + Optional. Updated display name of the order, + leave as empty if you do not want to update + current display name. + etag (str): + Optional. The weak etag, which can be + optionally populated, of the order that this + modify request is based on. Validation checking + will only happen if the invoker supplies this + field. + """ + + class Modification(proto.Message): + r"""Modifications to make on the order. + + Attributes: + line_item_id (str): + Required. ID of the existing line item to make change to. + Required when change type is + [LineItemChangeType.LINE_ITEM_CHANGE_TYPE_UPDATE] or + [LineItemChangeType.LINE_ITEM_CHANGE_TYPE_CANCEL]. 
+ change_type (google.cloud.commerce_consumer_procurement_v1.types.LineItemChangeType): + Required. Type of change to make. + new_line_item_info (google.cloud.commerce_consumer_procurement_v1.types.LineItemInfo): + Optional. The line item to update to. Required when + change_type is + [LineItemChangeType.LINE_ITEM_CHANGE_TYPE_CREATE] or + [LineItemChangeType.LINE_ITEM_CHANGE_TYPE_UPDATE]. + auto_renewal_behavior (google.cloud.commerce_consumer_procurement_v1.types.AutoRenewalBehavior): + Optional. Auto renewal behavior of the subscription for the + update. Applied when change_type is + [LineItemChangeType.LINE_ITEM_CHANGE_TYPE_UPDATE]. Follows + plan default config when this field is not specified. + """ + + line_item_id: str = proto.Field( + proto.STRING, + number=1, + ) + change_type: order.LineItemChangeType = proto.Field( + proto.ENUM, + number=2, + enum=order.LineItemChangeType, + ) + new_line_item_info: order.LineItemInfo = proto.Field( + proto.MESSAGE, + number=3, + message=order.LineItemInfo, + ) + auto_renewal_behavior: "AutoRenewalBehavior" = proto.Field( + proto.ENUM, + number=4, + enum="AutoRenewalBehavior", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + modifications: MutableSequence[Modification] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=Modification, + ) + display_name: str = proto.Field( + proto.STRING, + number=5, + ) + etag: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ModifyOrderMetadata(proto.Message): + r"""Message stored in the metadata field of the Operation returned by + [ConsumerProcurementService.ModifyOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder]. + + """ + + +class CancelOrderRequest(proto.Message): + r"""Request message for + [ConsumerProcurementService.CancelOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder]. + + Attributes: + name (str): + Required. The resource name of the order. 
+ etag (str): + Optional. The weak etag, which can be + optionally populated, of the order that this + cancel request is based on. Validation checking + will only happen if the invoker supplies this + field. + cancellation_policy (google.cloud.commerce_consumer_procurement_v1.types.CancelOrderRequest.CancellationPolicy): + Optional. Cancellation policy of this + request. + """ + + class CancellationPolicy(proto.Enum): + r"""Indicates the cancellation policy the customer uses to cancel + the order. + + Values: + CANCELLATION_POLICY_UNSPECIFIED (0): + If unspecified, cancellation will try to + cancel the order, if order cannot be immediately + cancelled, auto renewal will be turned off. + However, caller should avoid using the value as + it will yield a non-deterministic result. This + is still supported mainly to maintain existing + integrated usages and ensure backwards + compatibility. + CANCELLATION_POLICY_CANCEL_IMMEDIATELY (1): + Request will cancel the whole order + immediately, if order cannot be immediately + cancelled, the request will fail. + CANCELLATION_POLICY_CANCEL_AT_TERM_END (2): + Request will cancel the auto renewal, if + order is not subscription based, the request + will fail. + """ + CANCELLATION_POLICY_UNSPECIFIED = 0 + CANCELLATION_POLICY_CANCEL_IMMEDIATELY = 1 + CANCELLATION_POLICY_CANCEL_AT_TERM_END = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + cancellation_policy: CancellationPolicy = proto.Field( + proto.ENUM, + number=3, + enum=CancellationPolicy, + ) + + +class CancelOrderMetadata(proto.Message): + r"""Message stored in the metadata field of the Operation returned by + [ConsumerProcurementService.CancelOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder]. 
+ + """ + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_async.py new file mode 100644 index 000000000000..6af6267a3dfe --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_CancelOrder_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_cancel_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.CancelOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_CancelOrder_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_sync.py new file mode 100644 index 000000000000..d107e654d209 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_CancelOrder_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_cancel_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.CancelOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_CancelOrder_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_async.py new file mode 100644 index 000000000000..d7995349fbc1 
--- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_ModifyOrder_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_modify_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.ModifyOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.modify_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_ModifyOrder_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_sync.py new file mode 100644 index 000000000000..7d2280e2367b --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_ModifyOrder_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_modify_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.ModifyOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.modify_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_ModifyOrder_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_async.py new file mode 100644 index 000000000000..2c5e4b37f6e4 --- /dev/null 
+++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Assign +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Assign_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_assign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.AssignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = await client.assign(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Assign_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_sync.py new file mode 100644 index 000000000000..ec7ea6ea12bf --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for Assign +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Assign_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_assign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.AssignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = client.assign(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Assign_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_async.py new file mode 100644 index 000000000000..735a8c855ace --- /dev/null +++ 
b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnumerateLicensedUsers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_EnumerateLicensedUsers_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_enumerate_licensed_users(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.EnumerateLicensedUsersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.enumerate_licensed_users(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_EnumerateLicensedUsers_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_sync.py new file mode 100644 index 000000000000..ae35b74ee487 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnumerateLicensedUsers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_EnumerateLicensedUsers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_enumerate_licensed_users(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.EnumerateLicensedUsersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.enumerate_licensed_users(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_EnumerateLicensedUsers_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_async.py new file mode 100644 index 
000000000000..ca51289633c0 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLicensePool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_GetLicensePool_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_get_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.GetLicensePoolRequest( + name="name_value", + ) + + # Make the request + response = await client.get_license_pool(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_GetLicensePool_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_sync.py new file mode 100644 index 000000000000..235e9b739fa0 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLicensePool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_GetLicensePool_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_get_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.GetLicensePoolRequest( + name="name_value", + ) + + # Make the request + response = client.get_license_pool(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_GetLicensePool_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_async.py new file mode 100644 index 000000000000..453d0136af27 --- /dev/null +++ 
b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Unassign +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Unassign_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_unassign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UnassignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = await client.unassign(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Unassign_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_sync.py new file mode 100644 index 000000000000..22d3cfb4d955 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for Unassign +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Unassign_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_unassign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UnassignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = client.unassign(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Unassign_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_async.py new file mode 100644 index 000000000000..9f6375daf58e --- /dev/null +++ 
b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLicensePool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_UpdateLicensePool_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_update_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UpdateLicensePoolRequest( + ) + + # Make the request + response = await client.update_license_pool(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_UpdateLicensePool_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_sync.py new file mode 100644 index 000000000000..8eff8c5b8d83 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLicensePool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_UpdateLicensePool_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_update_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UpdateLicensePoolRequest( + ) + + # Make the request + response = client.update_license_pool(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_UpdateLicensePool_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json index 08bfac2f3eb3..e4fae2067939 100644 --- a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json +++ 
b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json @@ -11,6 +11,159 @@ "version": "0.1.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient", + "shortName": "ConsumerProcurementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient.cancel_order", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService", + "shortName": "ConsumerProcurementService" + }, + "shortName": "CancelOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.CancelOrderRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "cancel_order" + }, + "description": "Sample for CancelOrder", + "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_CancelOrder_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + 
], + "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient", + "shortName": "ConsumerProcurementServiceClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient.cancel_order", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService", + "shortName": "ConsumerProcurementService" + }, + "shortName": "CancelOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.CancelOrderRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "cancel_order" + }, + "description": "Sample for CancelOrder", + "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_CancelOrder_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_sync.py" + }, { "canonical": 
true, "clientMethod": { @@ -341,19 +494,19 @@ "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient", "shortName": "ConsumerProcurementServiceAsyncClient" }, - "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient.place_order", + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient.modify_order", "method": { - "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.PlaceOrder", + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder", "service": { "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService", "shortName": "ConsumerProcurementService" }, - "shortName": "PlaceOrder" + "shortName": "ModifyOrder" }, "parameters": [ { "name": "request", - "type": "google.cloud.commerce_consumer_procurement_v1.types.PlaceOrderRequest" + "type": "google.cloud.commerce_consumer_procurement_v1.types.ModifyOrderRequest" }, { "name": "retry", @@ -369,21 +522,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "place_order" + "shortName": "modify_order" }, - "description": "Sample for PlaceOrder", - "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_async.py", + "description": "Sample for ModifyOrder", + "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_PlaceOrder_async", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_ModifyOrder_async", "segments": [ { - "end": 56, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 55, "start": 27, "type": "SHORT" }, @@ -393,22 +546,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 
46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_async.py" + "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_async.py" }, { "canonical": true, @@ -417,7 +570,84 @@ "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient", "shortName": "ConsumerProcurementServiceClient" }, - "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient.place_order", + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient.modify_order", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService", + "shortName": "ConsumerProcurementService" + }, + "shortName": "ModifyOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.ModifyOrderRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "modify_order" + }, + "description": "Sample for ModifyOrder", + "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_ModifyOrder_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 
27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient", + "shortName": "ConsumerProcurementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient.place_order", "method": { "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.PlaceOrder", "service": { @@ -444,14 +674,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", + "resultType": "google.api_core.operation_async.AsyncOperation", "shortName": "place_order" }, "description": "Sample for PlaceOrder", - "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_sync.py", + "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_PlaceOrder_sync", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_PlaceOrder_async", "segments": [ { "end": 56, @@ -484,7 +714,912 @@ "type": "RESPONSE_HANDLING" } ], - "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_sync.py" + "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + 
"fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient", + "shortName": "ConsumerProcurementServiceClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient.place_order", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.PlaceOrder", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService", + "shortName": "ConsumerProcurementService" + }, + "shortName": "PlaceOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.PlaceOrderRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "place_order" + }, + "description": "Sample for PlaceOrder", + "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_PlaceOrder_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient", + "shortName": 
"LicenseManagementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient.assign", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "Assign" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.AssignRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "usernames", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.AssignResponse", + "shortName": "assign" + }, + "description": "Sample for Assign", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Assign_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient", + "shortName": "LicenseManagementServiceClient" + }, + 
"fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient.assign", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "Assign" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.AssignRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "usernames", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.AssignResponse", + "shortName": "assign" + }, + "description": "Sample for Assign", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Assign_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient", + "shortName": "LicenseManagementServiceAsyncClient" + }, + "fullName": 
"google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient.enumerate_licensed_users", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "EnumerateLicensedUsers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.services.license_management_service.pagers.EnumerateLicensedUsersAsyncPager", + "shortName": "enumerate_licensed_users" + }, + "description": "Sample for EnumerateLicensedUsers", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_EnumerateLicensedUsers_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient", + "shortName": "LicenseManagementServiceClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient.enumerate_licensed_users", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "EnumerateLicensedUsers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.services.license_management_service.pagers.EnumerateLicensedUsersPager", + "shortName": "enumerate_licensed_users" + }, + "description": "Sample for EnumerateLicensedUsers", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_EnumerateLicensedUsers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_sync.py" 
+ }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient", + "shortName": "LicenseManagementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient.get_license_pool", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.GetLicensePool", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "GetLicensePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.GetLicensePoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.LicensePool", + "shortName": "get_license_pool" + }, + "description": "Sample for GetLicensePool", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_GetLicensePool_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_async.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient", + "shortName": "LicenseManagementServiceClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient.get_license_pool", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.GetLicensePool", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "GetLicensePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.GetLicensePoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.LicensePool", + "shortName": "get_license_pool" + }, + "description": "Sample for GetLicensePool", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_GetLicensePool_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": 
true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient", + "shortName": "LicenseManagementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient.unassign", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "Unassign" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.UnassignRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "usernames", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.UnassignResponse", + "shortName": "unassign" + }, + "description": "Sample for Unassign", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Unassign_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + 
"fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient", + "shortName": "LicenseManagementServiceClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient.unassign", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "Unassign" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.UnassignRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "usernames", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.UnassignResponse", + "shortName": "unassign" + }, + "description": "Sample for Unassign", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Unassign_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient", + "shortName": "LicenseManagementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient.update_license_pool", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.UpdateLicensePool", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "UpdateLicensePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.UpdateLicensePoolRequest" + }, + { + "name": "license_pool", + "type": "google.cloud.commerce_consumer_procurement_v1.types.LicensePool" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.LicensePool", + "shortName": "update_license_pool" + }, + "description": "Sample for UpdateLicensePool", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_UpdateLicensePool_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient", + "shortName": "LicenseManagementServiceClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient.update_license_pool", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.UpdateLicensePool", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "UpdateLicensePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.UpdateLicensePoolRequest" + }, + { + "name": "license_pool", + "type": "google.cloud.commerce_consumer_procurement_v1.types.LicensePool" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.LicensePool", + "shortName": "update_license_pool" + }, + "description": "Sample for UpdateLicensePool", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_UpdateLicensePool_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 
47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_sync.py" } ] } diff --git a/packages/google-cloud-commerce-consumer-procurement/scripts/fixup_commerce_consumer_procurement_v1_keywords.py b/packages/google-cloud-commerce-consumer-procurement/scripts/fixup_commerce_consumer_procurement_v1_keywords.py index 940f6a1151c1..dea0ce76511c 100644 --- a/packages/google-cloud-commerce-consumer-procurement/scripts/fixup_commerce_consumer_procurement_v1_keywords.py +++ b/packages/google-cloud-commerce-consumer-procurement/scripts/fixup_commerce_consumer_procurement_v1_keywords.py @@ -39,9 +39,16 @@ def partition( class commerce_consumer_procurementCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'assign': ('parent', 'usernames', ), + 'cancel_order': ('name', 'etag', 'cancellation_policy', ), + 'enumerate_licensed_users': ('parent', 'page_size', 'page_token', ), + 'get_license_pool': ('name', ), 'get_order': ('name', ), 'list_orders': ('parent', 'page_size', 'page_token', 'filter', ), + 'modify_order': ('name', 'modifications', 'display_name', 'etag', ), 'place_order': ('parent', 'display_name', 'line_item_info', 'request_id', ), + 'unassign': ('parent', 'usernames', ), + 'update_license_pool': ('license_pool', 'update_mask', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py index be7a8fe9c772..0bf79c077912 100644 --- 
a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py +++ b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py @@ -2436,6 +2436,582 @@ async def test_list_orders_async_pages(): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + procurement_service.ModifyOrderRequest, + dict, + ], +) +def test_modify_order(request_type, transport: str = "grpc"): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.modify_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = procurement_service.ModifyOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_modify_order_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.modify_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == procurement_service.ModifyOrderRequest() + + +def test_modify_order_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = procurement_service.ModifyOrderRequest( + name="name_value", + display_name="display_name_value", + etag="etag_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.modify_order(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == procurement_service.ModifyOrderRequest( + name="name_value", + display_name="display_name_value", + etag="etag_value", + ) + + +def test_modify_order_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.modify_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.modify_order] = mock_rpc + request = {} + client.modify_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.modify_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_modify_order_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.modify_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == procurement_service.ModifyOrderRequest() + + +@pytest.mark.asyncio +async def test_modify_order_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.modify_order + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.modify_order + ] = mock_rpc + + request = {} + await client.modify_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.modify_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_modify_order_async( + transport: str = "grpc_asyncio", request_type=procurement_service.ModifyOrderRequest +): + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.modify_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = procurement_service.ModifyOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_modify_order_async_from_dict(): + await test_modify_order_async(request_type=dict) + + +def test_modify_order_field_headers(): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = procurement_service.ModifyOrderRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.modify_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_modify_order_field_headers_async(): + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = procurement_service.ModifyOrderRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.modify_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + procurement_service.CancelOrderRequest, + dict, + ], +) +def test_cancel_order(request_type, transport: str = "grpc"): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.cancel_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = procurement_service.CancelOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_cancel_order_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.cancel_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == procurement_service.CancelOrderRequest() + + +def test_cancel_order_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = procurement_service.CancelOrderRequest( + name="name_value", + etag="etag_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.cancel_order(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == procurement_service.CancelOrderRequest( + name="name_value", + etag="etag_value", + ) + + +def test_cancel_order_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.cancel_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.cancel_order] = mock_rpc + request = {} + client.cancel_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.cancel_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_order_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.cancel_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == procurement_service.CancelOrderRequest() + + +@pytest.mark.asyncio +async def test_cancel_order_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.cancel_order + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.cancel_order + ] = 
mock_rpc + + request = {} + await client.cancel_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.cancel_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_order_async( + transport: str = "grpc_asyncio", request_type=procurement_service.CancelOrderRequest +): + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.cancel_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = procurement_service.CancelOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_cancel_order_async_from_dict(): + await test_cancel_order_async(request_type=dict) + + +def test_cancel_order_field_headers(): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = procurement_service.CancelOrderRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.cancel_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_order_field_headers_async(): + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = procurement_service.CancelOrderRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.cancel_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -2443,7 +3019,567 @@ async def test_list_orders_async_pages(): dict, ], ) -def test_place_order_rest(request_type): +def test_place_order_rest(request_type): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.place_order(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_place_order_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.place_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.place_order] = mock_rpc + + request = {} + client.place_order(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.place_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_place_order_rest_required_fields( + request_type=procurement_service.PlaceOrderRequest, +): + transport_class = transports.ConsumerProcurementServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["display_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).place_order._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["displayName"] = "display_name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).place_order._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "displayName" in jsonified_request + assert jsonified_request["displayName"] == "display_name_value" + + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.place_order(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_place_order_rest_unset_required_fields(): + transport = transports.ConsumerProcurementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.place_order._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "displayName", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_place_order_rest_interceptors(null_interceptor): + transport = transports.ConsumerProcurementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConsumerProcurementServiceRestInterceptor(), + ) + client = ConsumerProcurementServiceClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, "post_place_order" + ) as post, mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, "pre_place_order" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = procurement_service.PlaceOrderRequest.pb( + procurement_service.PlaceOrderRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = procurement_service.PlaceOrderRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.place_order( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_place_order_rest_bad_request( + transport: str = "rest", request_type=procurement_service.PlaceOrderRequest +): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.place_order(request) + + +def test_place_order_rest_error(): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + procurement_service.GetOrderRequest, + dict, + ], +) +def test_get_order_rest(request_type): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "billingAccounts/sample1/orders/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = order.Order( + name="name_value", + display_name="display_name_value", + etag="etag_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = order.Order.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_order(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, order.Order) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.etag == "etag_value" + + +def test_get_order_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_order] = mock_rpc + + request = {} + client.get_order(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_order_rest_required_fields( + request_type=procurement_service.GetOrderRequest, +): + transport_class = transports.ConsumerProcurementServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_order._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_order._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = order.Order() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = order.Order.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_order(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_order_rest_unset_required_fields(): + transport = transports.ConsumerProcurementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_order._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_order_rest_interceptors(null_interceptor): + transport = transports.ConsumerProcurementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConsumerProcurementServiceRestInterceptor(), + ) + client = ConsumerProcurementServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, "post_get_order" + ) as post, mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, "pre_get_order" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = procurement_service.GetOrderRequest.pb( + procurement_service.GetOrderRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = order.Order.to_json(order.Order()) + + request = procurement_service.GetOrderRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = order.Order() + + client.get_order( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_order_rest_bad_request( + transport: str = "rest", request_type=procurement_service.GetOrderRequest +): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "billingAccounts/sample1/orders/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_order(request) + + +def test_get_order_rest_flattened(): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = order.Order() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "billingAccounts/sample1/orders/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = order.Order.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_order(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=billingAccounts/*/orders/*}" % client.transport._host, args[1] + ) + + +def test_get_order_rest_flattened_error(transport: str = "rest"): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_order( + procurement_service.GetOrderRequest(), + name="name_value", + ) + + +def test_get_order_rest_error(): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + procurement_service.ListOrdersRequest, + dict, + ], +) +def test_list_orders_rest(request_type): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2456,22 +3592,27 @@ def test_place_order_rest(request_type): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = procurement_service.ListOrdersResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = procurement_service.ListOrdersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.place_order(request) + response = client.list_orders(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListOrdersPager) + assert response.next_page_token == "next_page_token_value" -def test_place_order_rest_use_cached_wrapped_rpc(): +def test_list_orders_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2485,40 +3626,35 @@ def test_place_order_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.place_order in client._transport._wrapped_methods + assert client._transport.list_orders in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.place_order] = mock_rpc + client._transport._wrapped_methods[client._transport.list_orders] = mock_rpc request = {} - client.place_order(request) + client.list_orders(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.place_order(request) + client.list_orders(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_place_order_rest_required_fields( - request_type=procurement_service.PlaceOrderRequest, +def test_list_orders_rest_required_fields( + request_type=procurement_service.ListOrdersRequest, ): transport_class = transports.ConsumerProcurementServiceRestTransport request_init = {} request_init["parent"] = "" - request_init["display_name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -2529,24 +3665,29 @@ def test_place_order_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).place_order._get_unset_required_fields(jsonified_request) + ).list_orders._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" - jsonified_request["displayName"] = "display_name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).place_order._get_unset_required_fields(jsonified_request) + ).list_orders._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "displayName" in jsonified_request - assert jsonified_request["displayName"] == "display_name_value" client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2555,7 +3696,7 @@ def test_place_order_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = procurement_service.ListOrdersResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -2567,45 +3708,48 @@ def test_place_order_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = procurement_service.ListOrdersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.place_order(request) + response = client.list_orders(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_place_order_rest_unset_required_fields(): +def test_list_orders_rest_unset_required_fields(): transport = transports.ConsumerProcurementServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.place_order._get_unset_required_fields({}) + unset_fields = transport.list_orders._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) - & set( + set( ( - "parent", - "displayName", + "filter", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_place_order_rest_interceptors(null_interceptor): +def test_list_orders_rest_interceptors(null_interceptor): transport = transports.ConsumerProcurementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2618,16 +3762,14 @@ def test_place_order_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ConsumerProcurementServiceRestInterceptor, "post_place_order" + transports.ConsumerProcurementServiceRestInterceptor, "post_list_orders" ) as post, mock.patch.object( - transports.ConsumerProcurementServiceRestInterceptor, "pre_place_order" + transports.ConsumerProcurementServiceRestInterceptor, "pre_list_orders" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = procurement_service.PlaceOrderRequest.pb( - procurement_service.PlaceOrderRequest() + pb_message = procurement_service.ListOrdersRequest.pb( + procurement_service.ListOrdersRequest() ) transcode.return_value = { "method": "post", @@ -2639,19 +3781,19 @@ def test_place_order_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = procurement_service.ListOrdersResponse.to_json( + procurement_service.ListOrdersResponse() ) - request = procurement_service.PlaceOrderRequest() + request 
= procurement_service.ListOrdersRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = procurement_service.ListOrdersResponse() - client.place_order( + client.list_orders( request, metadata=[ ("key", "val"), @@ -2663,8 +3805,8 @@ def test_place_order_rest_interceptors(null_interceptor): post.assert_called_once() -def test_place_order_rest_bad_request( - transport: str = "rest", request_type=procurement_service.PlaceOrderRequest +def test_list_orders_rest_bad_request( + transport: str = "rest", request_type=procurement_service.ListOrdersRequest ): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2684,23 +3826,135 @@ def test_place_order_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.place_order(request) + client.list_orders(request) -def test_place_order_rest_error(): +def test_list_orders_rest_flattened(): client = ConsumerProcurementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = procurement_service.ListOrdersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "billingAccounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = procurement_service.ListOrdersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_orders(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=billingAccounts/*}/orders" % client.transport._host, args[1] + ) + + +def test_list_orders_rest_flattened_error(transport: str = "rest"): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_orders( + procurement_service.ListOrdersRequest(), + parent="parent_value", + ) + + +def test_list_orders_rest_pager(transport: str = "rest"): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + procurement_service.ListOrdersResponse( + orders=[ + order.Order(), + order.Order(), + order.Order(), + ], + next_page_token="abc", + ), + procurement_service.ListOrdersResponse( + orders=[], + next_page_token="def", + ), + procurement_service.ListOrdersResponse( + orders=[ + order.Order(), + ], + next_page_token="ghi", + ), + procurement_service.ListOrdersResponse( + orders=[ + order.Order(), + order.Order(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + procurement_service.ListOrdersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "billingAccounts/sample1"} + + pager = client.list_orders(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, order.Order) for i in results) + + pages = list(client.list_orders(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - procurement_service.GetOrderRequest, + procurement_service.ModifyOrderRequest, dict, ], ) -def test_get_order_rest(request_type): +def test_modify_order_rest(request_type): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2713,31 +3967,22 @@ def test_get_order_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = order.Order( - name="name_value", - display_name="display_name_value", - etag="etag_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = order.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_order(request) + response = client.modify_order(request) # Establish that the response is the type that we expect. - assert isinstance(response, order.Order) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.etag == "etag_value" + assert response.operation.name == "operations/spam" -def test_get_order_rest_use_cached_wrapped_rpc(): +def test_modify_order_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2751,30 +3996,34 @@ def test_get_order_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_order in client._transport._wrapped_methods + assert client._transport.modify_order in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_order] = mock_rpc + client._transport._wrapped_methods[client._transport.modify_order] = mock_rpc request = {} - client.get_order(request) + client.modify_order(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_order(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.modify_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_order_rest_required_fields( - request_type=procurement_service.GetOrderRequest, +def test_modify_order_rest_required_fields( + request_type=procurement_service.ModifyOrderRequest, ): transport_class = transports.ConsumerProcurementServiceRestTransport @@ -2790,7 +4039,7 @@ def test_get_order_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_order._get_unset_required_fields(jsonified_request) + ).modify_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -2799,7 +4048,7 @@ def test_get_order_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_order._get_unset_required_fields(jsonified_request) + ).modify_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2813,7 +4062,7 @@ def test_get_order_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = order.Order() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -2825,39 +4074,37 @@ def test_get_order_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = order.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_order(request) + response = client.modify_order(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_order_rest_unset_required_fields(): +def test_modify_order_rest_unset_required_fields(): transport = transports.ConsumerProcurementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_order._get_unset_required_fields({}) + unset_fields = transport.modify_order._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_order_rest_interceptors(null_interceptor): +def test_modify_order_rest_interceptors(null_interceptor): transport = transports.ConsumerProcurementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2870,14 +4117,16 @@ def test_get_order_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConsumerProcurementServiceRestInterceptor, "post_get_order" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + 
transports.ConsumerProcurementServiceRestInterceptor, "post_modify_order" ) as post, mock.patch.object( - transports.ConsumerProcurementServiceRestInterceptor, "pre_get_order" + transports.ConsumerProcurementServiceRestInterceptor, "pre_modify_order" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = procurement_service.GetOrderRequest.pb( - procurement_service.GetOrderRequest() + pb_message = procurement_service.ModifyOrderRequest.pb( + procurement_service.ModifyOrderRequest() ) transcode.return_value = { "method": "post", @@ -2889,108 +4138,55 @@ def test_get_order_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = order.Order.to_json(order.Order()) - - request = procurement_service.GetOrderRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = order.Order() - - client.get_order( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_order_rest_bad_request( - transport: str = "rest", request_type=procurement_service.GetOrderRequest -): - client = ConsumerProcurementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "billingAccounts/sample1/orders/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_order(request) - - -def test_get_order_rest_flattened(): - client = ConsumerProcurementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = order.Order() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "billingAccounts/sample1/orders/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = order.Order.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - client.get_order(**mock_args) + request = procurement_service.ModifyOrderRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=billingAccounts/*/orders/*}" % client.transport._host, args[1] + client.modify_order( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) + pre.assert_called_once() + post.assert_called_once() -def test_get_order_rest_flattened_error(transport: str = "rest"): + +def test_modify_order_rest_bad_request( + transport: str = "rest", request_type=procurement_service.ModifyOrderRequest +): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_order( - procurement_service.GetOrderRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "billingAccounts/sample1/orders/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.modify_order(request) -def test_get_order_rest_error(): +def test_modify_order_rest_error(): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -2999,44 +4195,39 @@ def test_get_order_rest_error(): @pytest.mark.parametrize( "request_type", [ - procurement_service.ListOrdersRequest, + procurement_service.CancelOrderRequest, dict, ], ) -def test_list_orders_rest(request_type): +def test_cancel_order_rest(request_type): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "billingAccounts/sample1"} + request_init = {"name": "billingAccounts/sample1/orders/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = procurement_service.ListOrdersResponse( - next_page_token="next_page_token_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = procurement_service.ListOrdersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_orders(request) + response = client.cancel_order(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListOrdersPager) - assert response.next_page_token == "next_page_token_value" + assert response.operation.name == "operations/spam" -def test_list_orders_rest_use_cached_wrapped_rpc(): +def test_cancel_order_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3050,35 +4241,39 @@ def test_list_orders_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_orders in client._transport._wrapped_methods + assert client._transport.cancel_order in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_orders] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_order] = mock_rpc request = {} - client.list_orders(request) + client.cancel_order(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_orders(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.cancel_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_orders_rest_required_fields( - request_type=procurement_service.ListOrdersRequest, +def test_cancel_order_rest_required_fields( + request_type=procurement_service.CancelOrderRequest, ): transport_class = transports.ConsumerProcurementServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3089,29 +4284,21 @@ def test_list_orders_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_orders._get_unset_required_fields(jsonified_request) + ).cancel_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_orders._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + ).cancel_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3120,7 +4307,7 @@ def test_list_orders_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = procurement_service.ListOrdersResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3132,48 +4319,37 @@ def test_list_orders_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = procurement_service.ListOrdersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_orders(request) + response = client.cancel_order(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_orders_rest_unset_required_fields(): +def test_cancel_order_rest_unset_required_fields(): transport = 
transports.ConsumerProcurementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_orders._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.cancel_order._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_orders_rest_interceptors(null_interceptor): +def test_cancel_order_rest_interceptors(null_interceptor): transport = transports.ConsumerProcurementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3186,14 +4362,16 @@ def test_list_orders_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConsumerProcurementServiceRestInterceptor, "post_list_orders" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, "post_cancel_order" ) as post, mock.patch.object( - transports.ConsumerProcurementServiceRestInterceptor, "pre_list_orders" + transports.ConsumerProcurementServiceRestInterceptor, "pre_cancel_order" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = procurement_service.ListOrdersRequest.pb( - procurement_service.ListOrdersRequest() + pb_message = procurement_service.CancelOrderRequest.pb( + procurement_service.CancelOrderRequest() ) transcode.return_value = { "method": "post", @@ -3205,19 +4383,19 @@ def test_list_orders_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = procurement_service.ListOrdersResponse.to_json( - procurement_service.ListOrdersResponse() + req.return_value._content = json_format.MessageToJson( + 
operations_pb2.Operation() ) - request = procurement_service.ListOrdersRequest() + request = procurement_service.CancelOrderRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = procurement_service.ListOrdersResponse() + post.return_value = operations_pb2.Operation() - client.list_orders( + client.cancel_order( request, metadata=[ ("key", "val"), @@ -3229,8 +4407,8 @@ def test_list_orders_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_orders_rest_bad_request( - transport: str = "rest", request_type=procurement_service.ListOrdersRequest +def test_cancel_order_rest_bad_request( + transport: str = "rest", request_type=procurement_service.CancelOrderRequest ): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3238,7 +4416,7 @@ def test_list_orders_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "billingAccounts/sample1"} + request_init = {"name": "billingAccounts/sample1/orders/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3250,126 +4428,14 @@ def test_list_orders_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_orders(request) - - -def test_list_orders_rest_flattened(): - client = ConsumerProcurementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = procurement_service.ListOrdersResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "billingAccounts/sample1"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = procurement_service.ListOrdersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_orders(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=billingAccounts/*}/orders" % client.transport._host, args[1] - ) - - -def test_list_orders_rest_flattened_error(transport: str = "rest"): - client = ConsumerProcurementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_orders( - procurement_service.ListOrdersRequest(), - parent="parent_value", - ) + client.cancel_order(request) -def test_list_orders_rest_pager(transport: str = "rest"): +def test_cancel_order_rest_error(): client = ConsumerProcurementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - procurement_service.ListOrdersResponse( - orders=[ - order.Order(), - order.Order(), - order.Order(), - ], - next_page_token="abc", - ), - procurement_service.ListOrdersResponse( - orders=[], - next_page_token="def", - ), - procurement_service.ListOrdersResponse( - orders=[ - order.Order(), - ], - next_page_token="ghi", - ), - procurement_service.ListOrdersResponse( - orders=[ - order.Order(), - order.Order(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - procurement_service.ListOrdersResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "billingAccounts/sample1"} - - pager = client.list_orders(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, order.Order) for i in results) - - pages = list(client.list_orders(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
@@ -3513,6 +4579,8 @@ def test_consumer_procurement_service_base_transport(): "place_order", "get_order", "list_orders", + "modify_order", + "cancel_order", "get_operation", ) for method in methods: @@ -3809,6 +4877,12 @@ def test_consumer_procurement_service_client_transport_session_collision( session1 = client1.transport.list_orders._session session2 = client2.transport.list_orders._session assert session1 != session2 + session1 = client1.transport.modify_order._session + session2 = client2.transport.modify_order._session + assert session1 != session2 + session1 = client1.transport.cancel_order._session + session2 = client2.transport.cancel_order._session + assert session1 != session2 def test_consumer_procurement_service_grpc_transport_channel(): diff --git a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_license_management_service.py b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_license_management_service.py new file mode 100644 index 000000000000..aab21d579a3a --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_license_management_service.py @@ -0,0 +1,6057 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.commerce_consumer_procurement_v1.services.license_management_service import ( + LicenseManagementServiceAsyncClient, + LicenseManagementServiceClient, + pagers, + transports, +) +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert LicenseManagementServiceClient._get_default_mtls_endpoint(None) is None + assert ( + LicenseManagementServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + LicenseManagementServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + LicenseManagementServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + LicenseManagementServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + LicenseManagementServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert LicenseManagementServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert LicenseManagementServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert 
LicenseManagementServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + LicenseManagementServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert LicenseManagementServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert LicenseManagementServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert LicenseManagementServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + LicenseManagementServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert LicenseManagementServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert LicenseManagementServiceClient._get_client_cert_source(None, False) is None + assert ( + LicenseManagementServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + LicenseManagementServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == 
mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + LicenseManagementServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + LicenseManagementServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + LicenseManagementServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceClient), +) +@mock.patch.object( + LicenseManagementServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = LicenseManagementServiceClient._DEFAULT_UNIVERSE + default_endpoint = LicenseManagementServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = LicenseManagementServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + LicenseManagementServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + LicenseManagementServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == LicenseManagementServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LicenseManagementServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + LicenseManagementServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == LicenseManagementServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + 
LicenseManagementServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == LicenseManagementServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LicenseManagementServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + LicenseManagementServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + LicenseManagementServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + LicenseManagementServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + LicenseManagementServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + LicenseManagementServiceClient._get_universe_domain(None, None) + == LicenseManagementServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + LicenseManagementServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + ), + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. 
+ google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (LicenseManagementServiceClient, "grpc"), + (LicenseManagementServiceAsyncClient, "grpc_asyncio"), + (LicenseManagementServiceClient, "rest"), + ], +) +def test_license_management_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudcommerceconsumerprocurement.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudcommerceconsumerprocurement.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.LicenseManagementServiceGrpcTransport, "grpc"), + (transports.LicenseManagementServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.LicenseManagementServiceRestTransport, "rest"), + ], +) +def test_license_management_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = 
transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (LicenseManagementServiceClient, "grpc"), + (LicenseManagementServiceAsyncClient, "grpc_asyncio"), + (LicenseManagementServiceClient, "rest"), + ], +) +def test_license_management_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudcommerceconsumerprocurement.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudcommerceconsumerprocurement.googleapis.com" + ) + + +def test_license_management_service_client_get_transport_class(): + transport = LicenseManagementServiceClient.get_transport_class() + available_transports = [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceRestTransport, + ] + assert transport in available_transports + + transport = LicenseManagementServiceClient.get_transport_class("grpc") + assert transport == transports.LicenseManagementServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + 
LicenseManagementServiceClient, + transports.LicenseManagementServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + LicenseManagementServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceClient), +) +@mock.patch.object( + LicenseManagementServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceAsyncClient), +) +def test_license_management_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + LicenseManagementServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + LicenseManagementServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + "true", + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + 
"grpc_asyncio", + "true", + ), + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + "false", + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceRestTransport, + "rest", + "true", + ), + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + LicenseManagementServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceClient), +) +@mock.patch.object( + LicenseManagementServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_license_management_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [LicenseManagementServiceClient, LicenseManagementServiceAsyncClient], +) +@mock.patch.object( + LicenseManagementServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LicenseManagementServiceClient), +) +@mock.patch.object( + LicenseManagementServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LicenseManagementServiceAsyncClient), +) +def test_license_management_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", + [LicenseManagementServiceClient, LicenseManagementServiceAsyncClient], +) +@mock.patch.object( + LicenseManagementServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceClient), +) +@mock.patch.object( + LicenseManagementServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceAsyncClient), +) +def test_license_management_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = LicenseManagementServiceClient._DEFAULT_UNIVERSE + default_endpoint = LicenseManagementServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = LicenseManagementServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceRestTransport, + "rest", + ), + ], +) +def test_license_management_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceRestTransport, + "rest", + None, + ), + ], +) +def test_license_management_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_license_management_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.commerce_consumer_procurement_v1.services.license_management_service.transports.LicenseManagementServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = LicenseManagementServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_license_management_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "cloudcommerceconsumerprocurement.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="cloudcommerceconsumerprocurement.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.GetLicensePoolRequest, + dict, + ], +) +def test_get_license_pool(request_type, transport: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional 
in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + response = client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = license_management_service.GetLicensePoolRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.LicensePool) + assert response.name == "name_value" + assert response.available_license_count == 2411 + assert response.total_license_count == 2030 + + +def test_get_license_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_license_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.GetLicensePoolRequest() + + +def test_get_license_pool_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = license_management_service.GetLicensePoolRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_license_pool(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.GetLicensePoolRequest( + name="name_value", + ) + + +def test_get_license_pool_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_license_pool in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_license_pool + ] = mock_rpc + request = {} + client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_license_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_license_pool_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + ) + response = await client.get_license_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.GetLicensePoolRequest() + + +@pytest.mark.asyncio +async def test_get_license_pool_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_license_pool + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + 
client._client._transport._wrapped_methods[ + client._client._transport.get_license_pool + ] = mock_rpc + + request = {} + await client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_license_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_license_pool_async( + transport: str = "grpc_asyncio", + request_type=license_management_service.GetLicensePoolRequest, +): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + ) + response = await client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = license_management_service.GetLicensePoolRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, license_management_service.LicensePool) + assert response.name == "name_value" + assert response.available_license_count == 2411 + assert response.total_license_count == 2030 + + +@pytest.mark.asyncio +async def test_get_license_pool_async_from_dict(): + await test_get_license_pool_async(request_type=dict) + + +def test_get_license_pool_field_headers(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.GetLicensePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + call.return_value = license_management_service.LicensePool() + client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_license_pool_field_headers_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.GetLicensePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool() + ) + await client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_license_pool_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.LicensePool() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_license_pool( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_license_pool_flattened_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_license_pool( + license_management_service.GetLicensePoolRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_license_pool_flattened_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.LicensePool() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_license_pool( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_license_pool_flattened_error_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_license_pool( + license_management_service.GetLicensePoolRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.UpdateLicensePoolRequest, + dict, + ], +) +def test_update_license_pool(request_type, transport: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + response = client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = license_management_service.UpdateLicensePoolRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.LicensePool) + assert response.name == "name_value" + assert response.available_license_count == 2411 + assert response.total_license_count == 2030 + + +def test_update_license_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_license_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.UpdateLicensePoolRequest() + + +def test_update_license_pool_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = license_management_service.UpdateLicensePoolRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_license_pool(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.UpdateLicensePoolRequest() + + +def test_update_license_pool_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_license_pool in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_license_pool + ] = mock_rpc + request = {} + client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_license_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_license_pool_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + ) + response = await client.update_license_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.UpdateLicensePoolRequest() + + +@pytest.mark.asyncio +async def test_update_license_pool_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_license_pool + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_license_pool + ] = mock_rpc + + request = {} + await client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.update_license_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_license_pool_async( + transport: str = "grpc_asyncio", + request_type=license_management_service.UpdateLicensePoolRequest, +): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + ) + response = await client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = license_management_service.UpdateLicensePoolRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, license_management_service.LicensePool) + assert response.name == "name_value" + assert response.available_license_count == 2411 + assert response.total_license_count == 2030 + + +@pytest.mark.asyncio +async def test_update_license_pool_async_from_dict(): + await test_update_license_pool_async(request_type=dict) + + +def test_update_license_pool_field_headers(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.UpdateLicensePoolRequest() + + request.license_pool.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + call.return_value = license_management_service.LicensePool() + client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "license_pool.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_license_pool_field_headers_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.UpdateLicensePoolRequest() + + request.license_pool.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool() + ) + await client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "license_pool.name=name_value", + ) in kw["metadata"] + + +def test_update_license_pool_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.LicensePool() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_license_pool( + license_pool=license_management_service.LicensePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].license_pool + mock_val = license_management_service.LicensePool(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_license_pool_flattened_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.update_license_pool(
+            license_management_service.UpdateLicensePoolRequest(),
+            license_pool=license_management_service.LicensePool(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_license_pool_flattened_async():
+    client = LicenseManagementServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_license_pool), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call. Only the
+        # FakeUnaryUnaryCall below is awaited by the async client; a plain
+        # sync return value here would be dead (immediately overwritten).
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            license_management_service.LicensePool()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_license_pool(
+            license_pool=license_management_service.LicensePool(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].license_pool
+        mock_val = license_management_service.LicensePool(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_license_pool_flattened_error_async():
+    client = LicenseManagementServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.update_license_pool( + license_management_service.UpdateLicensePoolRequest(), + license_pool=license_management_service.LicensePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.AssignRequest, + dict, + ], +) +def test_assign(request_type, transport: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.AssignResponse() + response = client.assign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = license_management_service.AssignRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.AssignResponse) + + +def test_assign_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.assign() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.AssignRequest() + + +def test_assign_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = license_management_service.AssignRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.assign(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.AssignRequest( + parent="parent_value", + ) + + +def test_assign_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.assign in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.assign] = mock_rpc + request = {} + client.assign(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.assign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_assign_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.AssignResponse() + ) + response = await client.assign() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.AssignRequest() + + +@pytest.mark.asyncio +async def test_assign_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.assign + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + 
client._client._transport._wrapped_methods[ + client._client._transport.assign + ] = mock_rpc + + request = {} + await client.assign(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.assign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_assign_async( + transport: str = "grpc_asyncio", + request_type=license_management_service.AssignRequest, +): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.AssignResponse() + ) + response = await client.assign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = license_management_service.AssignRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.AssignResponse) + + +@pytest.mark.asyncio +async def test_assign_async_from_dict(): + await test_assign_async(request_type=dict) + + +def test_assign_field_headers(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = license_management_service.AssignRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + call.return_value = license_management_service.AssignResponse() + client.assign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_assign_field_headers_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.AssignRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.AssignResponse() + ) + await client.assign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_assign_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + # Designate an appropriate return value for the call. 
+        call.return_value = license_management_service.AssignResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.assign(
+            parent="parent_value",
+            usernames=["usernames_value"],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].usernames
+        mock_val = ["usernames_value"]
+        assert arg == mock_val
+
+
+def test_assign_flattened_error():
+    client = LicenseManagementServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.assign(
+            license_management_service.AssignRequest(),
+            parent="parent_value",
+            usernames=["usernames_value"],
+        )
+
+
+@pytest.mark.asyncio
+async def test_assign_flattened_async():
+    client = LicenseManagementServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.assign), "__call__") as call:
+        # Designate an appropriate return value for the call. Only the
+        # FakeUnaryUnaryCall below is awaited by the async client; a plain
+        # sync return value here would be dead (immediately overwritten).
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            license_management_service.AssignResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.assign(
+            parent="parent_value",
+            usernames=["usernames_value"],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].usernames + mock_val = ["usernames_value"] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_assign_flattened_error_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.assign( + license_management_service.AssignRequest(), + parent="parent_value", + usernames=["usernames_value"], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.UnassignRequest, + dict, + ], +) +def test_unassign(request_type, transport: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.UnassignResponse() + response = client.unassign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = license_management_service.UnassignRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.UnassignResponse) + + +def test_unassign_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.unassign() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.UnassignRequest() + + +def test_unassign_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = license_management_service.UnassignRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.unassign(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.UnassignRequest( + parent="parent_value", + ) + + +def test_unassign_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.unassign in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.unassign] = mock_rpc + request = {} + client.unassign(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.unassign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_unassign_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.UnassignResponse() + ) + response = await client.unassign() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.UnassignRequest() + + +@pytest.mark.asyncio +async def test_unassign_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.unassign + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.unassign + ] = mock_rpc + + request = {} + await client.unassign(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.unassign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_unassign_async( + transport: str = "grpc_asyncio", + request_type=license_management_service.UnassignRequest, +): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.UnassignResponse() + ) + response = await client.unassign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = license_management_service.UnassignRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.UnassignResponse) + + +@pytest.mark.asyncio +async def test_unassign_async_from_dict(): + await test_unassign_async(request_type=dict) + + +def test_unassign_field_headers(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.UnassignRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + call.return_value = license_management_service.UnassignResponse() + client.unassign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_unassign_field_headers_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.UnassignRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.UnassignResponse() + ) + await client.unassign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_unassign_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.UnassignResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.unassign( + parent="parent_value", + usernames=["usernames_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].usernames
+        mock_val = ["usernames_value"]
+        assert arg == mock_val
+
+
+def test_unassign_flattened_error():
+    client = LicenseManagementServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.unassign(
+            license_management_service.UnassignRequest(),
+            parent="parent_value",
+            usernames=["usernames_value"],
+        )
+
+
+@pytest.mark.asyncio
+async def test_unassign_flattened_async():
+    client = LicenseManagementServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.unassign), "__call__") as call:
+        # Designate an appropriate return value for the call. Only the
+        # FakeUnaryUnaryCall below is awaited by the async client; a plain
+        # sync return value here would be dead (immediately overwritten).
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            license_management_service.UnassignResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.unassign(
+            parent="parent_value",
+            usernames=["usernames_value"],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].usernames + mock_val = ["usernames_value"] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_unassign_flattened_error_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.unassign( + license_management_service.UnassignRequest(), + parent="parent_value", + usernames=["usernames_value"], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.EnumerateLicensedUsersRequest, + dict, + ], +) +def test_enumerate_licensed_users(request_type, transport: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.EnumerateLicensedUsersResponse( + next_page_token="next_page_token_value", + ) + response = client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = license_management_service.EnumerateLicensedUsersRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.EnumerateLicensedUsersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_enumerate_licensed_users_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.enumerate_licensed_users() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.EnumerateLicensedUsersRequest() + + +def test_enumerate_licensed_users_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = license_management_service.EnumerateLicensedUsersRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.enumerate_licensed_users(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.EnumerateLicensedUsersRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_enumerate_licensed_users_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.enumerate_licensed_users + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.enumerate_licensed_users + ] = mock_rpc + request = {} + client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.enumerate_licensed_users(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.EnumerateLicensedUsersResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.enumerate_licensed_users() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.EnumerateLicensedUsersRequest() + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.enumerate_licensed_users + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.enumerate_licensed_users + ] = mock_rpc + + request = {} + await client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.enumerate_licensed_users(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_async( + transport: str = "grpc_asyncio", + request_type=license_management_service.EnumerateLicensedUsersRequest, +): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.EnumerateLicensedUsersResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = license_management_service.EnumerateLicensedUsersRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.EnumerateLicensedUsersAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_async_from_dict(): + await test_enumerate_licensed_users_async(request_type=dict) + + +def test_enumerate_licensed_users_field_headers(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = license_management_service.EnumerateLicensedUsersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + call.return_value = license_management_service.EnumerateLicensedUsersResponse() + client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_field_headers_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.EnumerateLicensedUsersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.EnumerateLicensedUsersResponse() + ) + await client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_enumerate_licensed_users_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.EnumerateLicensedUsersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.enumerate_licensed_users( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_enumerate_licensed_users_flattened_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.enumerate_licensed_users( + license_management_service.EnumerateLicensedUsersRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_flattened_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = license_management_service.EnumerateLicensedUsersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.EnumerateLicensedUsersResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.enumerate_licensed_users( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_flattened_error_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.enumerate_licensed_users( + license_management_service.EnumerateLicensedUsersRequest(), + parent="parent_value", + ) + + +def test_enumerate_licensed_users_pager(transport_name: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Set the response to a series of pages. 
        call.side_effect = (
            license_management_service.EnumerateLicensedUsersResponse(
                licensed_users=[
                    license_management_service.LicensedUser(),
                    license_management_service.LicensedUser(),
                    license_management_service.LicensedUser(),
                ],
                next_page_token="abc",
            ),
            license_management_service.EnumerateLicensedUsersResponse(
                licensed_users=[],
                next_page_token="def",
            ),
            license_management_service.EnumerateLicensedUsersResponse(
                licensed_users=[
                    license_management_service.LicensedUser(),
                ],
                next_page_token="ghi",
            ),
            license_management_service.EnumerateLicensedUsersResponse(
                licensed_users=[
                    license_management_service.LicensedUser(),
                ],
            ),
            RuntimeError,  # raised if the pager requests a page beyond the last
        )

        # Routing-header metadata plus retry/timeout should be threaded
        # through to the pager unchanged.
        expected_metadata = ()
        retry = retries.Retry()
        timeout = 5
        expected_metadata = tuple(expected_metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.enumerate_licensed_users(
            request={}, retry=retry, timeout=timeout
        )

        assert pager._metadata == expected_metadata
        assert pager._retry == retry
        assert pager._timeout == timeout

        results = list(pager)
        assert len(results) == 6
        assert all(
            isinstance(i, license_management_service.LicensedUser) for i in results
        )


def test_enumerate_licensed_users_pages(transport_name: str = "grpc"):
    """Page objects expose each page's raw next_page_token in order."""
    client = LicenseManagementServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.enumerate_licensed_users), "__call__"
    ) as call:
        # Set the response to a series of pages.
+ call.side_effect = ( + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + next_page_token="abc", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[], + next_page_token="def", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + ], + next_page_token="ghi", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + ), + RuntimeError, + ) + pages = list(client.enumerate_licensed_users(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_async_pager(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + next_page_token="abc", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[], + next_page_token="def", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + ], + next_page_token="ghi", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + ), + RuntimeError, + ) + async_pager = await client.enumerate_licensed_users( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, license_management_service.LicensedUser) for i in responses + ) + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_async_pages(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + next_page_token="abc", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[], + next_page_token="def", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + ], + next_page_token="ghi", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.enumerate_licensed_users(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.GetLicensePoolRequest, + dict, + ], +) +def test_get_license_pool_rest(request_type): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.LicensePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_license_pool(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.LicensePool) + assert response.name == "name_value" + assert response.available_license_count == 2411 + assert response.total_license_count == 2030 + + +def test_get_license_pool_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_license_pool in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_license_pool + ] = mock_rpc + + request = {} + client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_license_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_license_pool_rest_required_fields( + request_type=license_management_service.GetLicensePoolRequest, +): + transport_class = transports.LicenseManagementServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_license_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_license_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = license_management_service.LicensePool() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = license_management_service.LicensePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_license_pool(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_license_pool_rest_unset_required_fields(): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_license_pool._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_license_pool_rest_interceptors(null_interceptor): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LicenseManagementServiceRestInterceptor(), + ) + client = LicenseManagementServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "post_get_license_pool" + ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "pre_get_license_pool" + ) 
as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = license_management_service.GetLicensePoolRequest.pb( + license_management_service.GetLicensePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = license_management_service.LicensePool.to_json( + license_management_service.LicensePool() + ) + + request = license_management_service.GetLicensePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = license_management_service.LicensePool() + + client.get_license_pool( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_license_pool_rest_bad_request( + transport: str = "rest", + request_type=license_management_service.GetLicensePoolRequest, +): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
    # (tail of test_get_license_pool_rest_bad_request: an HTTP 400 from the
    # mocked session must surface to the caller as core_exceptions.BadRequest.)
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_license_pool(request)


def test_get_license_pool_rest_flattened():
    """Flattened `name` kwarg is transcoded into the REST URI for GetLicensePool."""
    client = LicenseManagementServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = license_management_service.LicensePool()

        # get arguments that satisfy an http rule for this method
        sample_request = {"name": "billingAccounts/sample1/orders/sample2/licensePool"}

        # get truthy value for each flattened field
        mock_args = dict(
            name="name_value",
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = license_management_service.LicensePool.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        client.get_license_pool(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=billingAccounts/*/orders/*/licensePool}" + % client.transport._host, + args[1], + ) + + +def test_get_license_pool_rest_flattened_error(transport: str = "rest"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_license_pool( + license_management_service.GetLicensePoolRequest(), + name="name_value", + ) + + +def test_get_license_pool_rest_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.UpdateLicensePoolRequest, + dict, + ], +) +def test_update_license_pool_rest(request_type): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "license_pool": { + "name": "billingAccounts/sample1/orders/sample2/licensePool/sample3" + } + } + request_init["license_pool"] = { + "name": "billingAccounts/sample1/orders/sample2/licensePool/sample3", + "license_assignment_protocol": { + "manual_assignment_type": {}, + "auto_assignment_type": { + "inactive_license_ttl": {"seconds": 751, "nanos": 543} + }, + }, + "available_license_count": 2411, + "total_license_count": 2030, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = license_management_service.UpdateLicensePoolRequest.meta.fields[ + "license_pool" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["license_pool"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime 
version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["license_pool"][field])): + del request_init["license_pool"][field][i][subfield] + else: + del request_init["license_pool"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.LicensePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_license_pool(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, license_management_service.LicensePool) + assert response.name == "name_value" + assert response.available_license_count == 2411 + assert response.total_license_count == 2030 + + +def test_update_license_pool_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_license_pool in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_license_pool + ] = mock_rpc + + request = {} + client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_license_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_license_pool_rest_required_fields( + request_type=license_management_service.UpdateLicensePoolRequest, +): + transport_class = transports.LicenseManagementServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_license_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_license_pool._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = license_management_service.LicensePool() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = license_management_service.LicensePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_license_pool(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_license_pool_rest_unset_required_fields(): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_license_pool._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "licensePool", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_license_pool_rest_interceptors(null_interceptor): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LicenseManagementServiceRestInterceptor(), + ) + client = LicenseManagementServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "post_update_license_pool" 
+ ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "pre_update_license_pool" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = license_management_service.UpdateLicensePoolRequest.pb( + license_management_service.UpdateLicensePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = license_management_service.LicensePool.to_json( + license_management_service.LicensePool() + ) + + request = license_management_service.UpdateLicensePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = license_management_service.LicensePool() + + client.update_license_pool( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_license_pool_rest_bad_request( + transport: str = "rest", + request_type=license_management_service.UpdateLicensePoolRequest, +): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "license_pool": { + "name": "billingAccounts/sample1/orders/sample2/licensePool/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_license_pool(request) + + +def test_update_license_pool_rest_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = license_management_service.LicensePool() + + # get arguments that satisfy an http rule for this method + sample_request = { + "license_pool": { + "name": "billingAccounts/sample1/orders/sample2/licensePool/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + license_pool=license_management_service.LicensePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.LicensePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_license_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{license_pool.name=billingAccounts/*/orders/*/licensePool/*}" + % client.transport._host, + args[1], + ) + + +def test_update_license_pool_rest_flattened_error(transport: str = "rest"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_license_pool( + license_management_service.UpdateLicensePoolRequest(), + license_pool=license_management_service.LicensePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_license_pool_rest_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.AssignRequest, + dict, + ], +) +def test_assign_rest(request_type): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = license_management_service.AssignResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.AssignResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.assign(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.AssignResponse) + + +def test_assign_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.assign in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.assign] = mock_rpc + + request = {} + client.assign(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.assign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_assign_rest_required_fields( + request_type=license_management_service.AssignRequest, +): + transport_class = transports.LicenseManagementServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["usernames"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).assign._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["usernames"] = "usernames_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).assign._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "usernames" in jsonified_request + assert jsonified_request["usernames"] == "usernames_value" + + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = license_management_service.AssignResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = license_management_service.AssignResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.assign(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_assign_rest_unset_required_fields(): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.assign._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "usernames", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_assign_rest_interceptors(null_interceptor): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LicenseManagementServiceRestInterceptor(), + ) + client = LicenseManagementServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "post_assign" + ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "pre_assign" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = license_management_service.AssignRequest.pb( + license_management_service.AssignRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = license_management_service.AssignResponse.to_json( + license_management_service.AssignResponse() + ) + + request = license_management_service.AssignRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = license_management_service.AssignResponse() + + client.assign( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_assign_rest_bad_request( + transport: str = "rest", request_type=license_management_service.AssignRequest +): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.assign(request) + + +def test_assign_rest_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = license_management_service.AssignResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "billingAccounts/sample1/orders/sample2/licensePool" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + usernames=["usernames_value"], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.AssignResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.assign(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=billingAccounts/*/orders/*/licensePool}:assign" + % client.transport._host, + args[1], + ) + + +def test_assign_rest_flattened_error(transport: str = "rest"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.assign( + license_management_service.AssignRequest(), + parent="parent_value", + usernames=["usernames_value"], + ) + + +def test_assign_rest_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.UnassignRequest, + dict, + ], +) +def test_unassign_rest(request_type): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = license_management_service.UnassignResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.UnassignResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.unassign(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.UnassignResponse) + + +def test_unassign_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.unassign in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.unassign] = mock_rpc + + request = {} + client.unassign(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.unassign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_unassign_rest_required_fields( + request_type=license_management_service.UnassignRequest, +): + transport_class = transports.LicenseManagementServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["usernames"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).unassign._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["usernames"] = "usernames_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).unassign._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "usernames" in jsonified_request + assert jsonified_request["usernames"] == "usernames_value" + + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = license_management_service.UnassignResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = license_management_service.UnassignResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.unassign(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_unassign_rest_unset_required_fields(): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.unassign._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "usernames", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_unassign_rest_interceptors(null_interceptor): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LicenseManagementServiceRestInterceptor(), + ) + client = LicenseManagementServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) 
as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "post_unassign" + ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "pre_unassign" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = license_management_service.UnassignRequest.pb( + license_management_service.UnassignRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = license_management_service.UnassignResponse.to_json( + license_management_service.UnassignResponse() + ) + + request = license_management_service.UnassignRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = license_management_service.UnassignResponse() + + client.unassign( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_unassign_rest_bad_request( + transport: str = "rest", request_type=license_management_service.UnassignRequest +): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.unassign(request) + + +def test_unassign_rest_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = license_management_service.UnassignResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "billingAccounts/sample1/orders/sample2/licensePool" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + usernames=["usernames_value"], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.UnassignResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.unassign(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=billingAccounts/*/orders/*/licensePool}:unassign" + % client.transport._host, + args[1], + ) + + +def test_unassign_rest_flattened_error(transport: str = "rest"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.unassign( + license_management_service.UnassignRequest(), + parent="parent_value", + usernames=["usernames_value"], + ) + + +def test_unassign_rest_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.EnumerateLicensedUsersRequest, + dict, + ], +) +def test_enumerate_licensed_users_rest(request_type): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = license_management_service.EnumerateLicensedUsersResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.EnumerateLicensedUsersResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.enumerate_licensed_users(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.EnumerateLicensedUsersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_enumerate_licensed_users_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.enumerate_licensed_users + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.enumerate_licensed_users + ] = mock_rpc + + request = {} + client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.enumerate_licensed_users(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_enumerate_licensed_users_rest_required_fields( + request_type=license_management_service.EnumerateLicensedUsersRequest, +): + transport_class = transports.LicenseManagementServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enumerate_licensed_users._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enumerate_licensed_users._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = license_management_service.EnumerateLicensedUsersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = license_management_service.EnumerateLicensedUsersResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.enumerate_licensed_users(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_enumerate_licensed_users_rest_unset_required_fields(): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.enumerate_licensed_users._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enumerate_licensed_users_rest_interceptors(null_interceptor): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LicenseManagementServiceRestInterceptor(), + ) + client = LicenseManagementServiceClient(transport=transport) + with 
mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, + "post_enumerate_licensed_users", + ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, + "pre_enumerate_licensed_users", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = license_management_service.EnumerateLicensedUsersRequest.pb( + license_management_service.EnumerateLicensedUsersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + license_management_service.EnumerateLicensedUsersResponse.to_json( + license_management_service.EnumerateLicensedUsersResponse() + ) + ) + + request = license_management_service.EnumerateLicensedUsersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = license_management_service.EnumerateLicensedUsersResponse() + + client.enumerate_licensed_users( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_enumerate_licensed_users_rest_bad_request( + transport: str = "rest", + request_type=license_management_service.EnumerateLicensedUsersRequest, +): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enumerate_licensed_users(request) + + +def test_enumerate_licensed_users_rest_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = license_management_service.EnumerateLicensedUsersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "billingAccounts/sample1/orders/sample2/licensePool" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.EnumerateLicensedUsersResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.enumerate_licensed_users(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=billingAccounts/*/orders/*/licensePool}:enumerateLicensedUsers" + % client.transport._host, + args[1], + ) + + +def test_enumerate_licensed_users_rest_flattened_error(transport: str = "rest"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.enumerate_licensed_users( + license_management_service.EnumerateLicensedUsersRequest(), + parent="parent_value", + ) + + +def test_enumerate_licensed_users_rest_pager(transport: str = "rest"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + next_page_token="abc", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[], + next_page_token="def", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + ], + next_page_token="ghi", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + license_management_service.EnumerateLicensedUsersResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "billingAccounts/sample1/orders/sample2/licensePool" + } + + pager = client.enumerate_licensed_users(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, license_management_service.LicensedUser) for i in results + ) + + pages = list(client.enumerate_licensed_users(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.LicenseManagementServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.LicenseManagementServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LicenseManagementServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.LicenseManagementServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LicenseManagementServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LicenseManagementServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.LicenseManagementServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LicenseManagementServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.LicenseManagementServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = LicenseManagementServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.LicenseManagementServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.LicenseManagementServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + transports.LicenseManagementServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = LicenseManagementServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.LicenseManagementServiceGrpcTransport, + ) + + +def test_license_management_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LicenseManagementServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_license_management_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.commerce_consumer_procurement_v1.services.license_management_service.transports.LicenseManagementServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.LicenseManagementServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get_license_pool", + "update_license_pool", + "assign", + "unassign", + "enumerate_licensed_users", + "get_operation", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_license_management_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.commerce_consumer_procurement_v1.services.license_management_service.transports.LicenseManagementServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LicenseManagementServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_license_management_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.commerce_consumer_procurement_v1.services.license_management_service.transports.LicenseManagementServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LicenseManagementServiceTransport() + adc.assert_called_once() + + +def test_license_management_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + LicenseManagementServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + ], +) +def test_license_management_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + transports.LicenseManagementServiceRestTransport, + ], +) +def test_license_management_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.LicenseManagementServiceGrpcTransport, grpc_helpers), + (transports.LicenseManagementServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_license_management_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "cloudcommerceconsumerprocurement.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="cloudcommerceconsumerprocurement.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + ], +) +def test_license_management_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_license_management_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.LicenseManagementServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_license_management_service_host_no_port(transport_name): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudcommerceconsumerprocurement.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "cloudcommerceconsumerprocurement.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudcommerceconsumerprocurement.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_license_management_service_host_with_port(transport_name): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudcommerceconsumerprocurement.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + 
"cloudcommerceconsumerprocurement.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudcommerceconsumerprocurement.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_license_management_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = LicenseManagementServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = LicenseManagementServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_license_pool._session + session2 = client2.transport.get_license_pool._session + assert session1 != session2 + session1 = client1.transport.update_license_pool._session + session2 = client2.transport.update_license_pool._session + assert session1 != session2 + session1 = client1.transport.assign._session + session2 = client2.transport.assign._session + assert session1 != session2 + session1 = client1.transport.unassign._session + session2 = client2.transport.unassign._session + assert session1 != session2 + session1 = client1.transport.enumerate_licensed_users._session + session2 = client2.transport.enumerate_licensed_users._session + assert session1 != session2 + + +def test_license_management_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.LicenseManagementServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_license_management_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.LicenseManagementServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + ], +) +def test_license_management_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key 
bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + ], +) +def test_license_management_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_license_pool_path(): + billing_account = "squid" + order = "clam" + expected = "billingAccounts/{billing_account}/orders/{order}/licensePool".format( + 
billing_account=billing_account, + order=order, + ) + actual = LicenseManagementServiceClient.license_pool_path(billing_account, order) + assert expected == actual + + +def test_parse_license_pool_path(): + expected = { + "billing_account": "whelk", + "order": "octopus", + } + path = LicenseManagementServiceClient.license_pool_path(**expected) + + # Check that the path construction is reversible. + actual = LicenseManagementServiceClient.parse_license_pool_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = LicenseManagementServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = LicenseManagementServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = LicenseManagementServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = LicenseManagementServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = LicenseManagementServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LicenseManagementServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = LicenseManagementServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = LicenseManagementServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = LicenseManagementServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = LicenseManagementServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = LicenseManagementServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = LicenseManagementServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = LicenseManagementServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = LicenseManagementServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LicenseManagementServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.LicenseManagementServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.LicenseManagementServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = LicenseManagementServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "billingAccounts/sample1/orders/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "billingAccounts/sample1/orders/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation(transport: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From bd7ac5328808f9aadfad08404348bc1cc473ff08 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 7 Oct 2024 17:13:23 +0200 Subject: [PATCH 53/59] chore(deps): update all dependencies (#13130) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | Type | Update | |---|---|---|---|---|---|---|---| | [google-cloud-kms](https://redirect.github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms) ([source](https://redirect.github.com/googleapis/google-cloud-python)) | `>= 2.3.0, <3.0.0dev` -> `>=3.0.0, <3.1.0` | 
[![age](https://developer.mend.io/api/mc/badges/age/pypi/google-cloud-kms/3.0.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/google-cloud-kms/3.0.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/google-cloud-kms/2.24.2/3.0.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/google-cloud-kms/2.24.2/3.0.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | major | | [ubuntu](https://redirect.github.com/actions/runner-images) | `22.04` -> `24.04` | [![age](https://developer.mend.io/api/mc/badges/age/github-runners/ubuntu/24.04?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/github-runners/ubuntu/24.04?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/github-runners/ubuntu/22.04/24.04?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/github-runners/ubuntu/22.04/24.04?slim=true)](https://docs.renovatebot.com/merge-confidence/) | github-runner | major | --- ### Release Notes
googleapis/google-cloud-python (google-cloud-kms) ### [`v3.0.0`](https://redirect.github.com/googleapis/google-cloud-python/releases/tag/google-cloud-kms-v3.0.0): google-cloud-kms: v3.0.0 [Compare Source](https://redirect.github.com/googleapis/google-cloud-python/compare/google-cloud-kms-v2.24.2...google-cloud-kms-v3.0.0) ##### ⚠ BREAKING CHANGES - Pagination feature is introduced for method ListKeyHandles in service Autokey ##### Features - Adding a state field for AutokeyConfig ([b4c9770](https://redirect.github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) ##### Bug Fixes - Pagination feature is introduced for method ListKeyHandles in service Autokey ([b4c9770](https://redirect.github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) ##### Documentation - A comment for field destroy_scheduled_duration in message .google.cloud.kms.v1.CryptoKey is updated for the default duration ([b4c9770](https://redirect.github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) - Field service_resolvers in message .google.cloud.kms.v1.EkmConnection is Explicitly is marked as to have field behavior of Optional ([b4c9770](https://redirect.github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4))
--- ### Configuration 📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 👻 **Immortal**: This PR will be recreated if closed unmerged. Get [config help](https://redirect.github.com/renovatebot/renovate/discussions) if that's undesired. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR was generated by [Mend Renovate](https://mend.io/renovate/). View the [repository job log](https://developer.mend.io/github/googleapis/google-cloud-python). BEGIN_COMMIT_OVERRIDE fix(deps): allow google-cloud-kms 3.x END_COMMIT_OVERRIDE --------- Co-authored-by: ohmayr Co-authored-by: Anthonios Partheniou --- .github/workflows/main.yml | 2 +- packages/google-cloud-kms-inventory/setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 6a77a8b8e8e3..678e2c6a3724 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -29,7 +29,7 @@ jobs: permissions: pull-requests: write # for googleapis/code-suggester name: Update API List PR - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # don't run the workflow on forks of googleapis/google-cloud-python if: ${{github.repository == 'googleapis/google-cloud-python'}} steps: diff --git a/packages/google-cloud-kms-inventory/setup.py b/packages/google-cloud-kms-inventory/setup.py index fe5e2c1e73d4..6fd9503942b2 100644 --- a/packages/google-cloud-kms-inventory/setup.py +++ b/packages/google-cloud-kms-inventory/setup.py @@ -47,7 +47,7 @@ "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "google-cloud-kms >= 2.3.0, <3.0.0dev", + "google-cloud-kms >= 2.3.0, <4.0.0", ] url = 
"https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms-inventory" From 27c262d51c5d9f055152d9448f5fb6759da4bdb3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 10:15:31 -0400 Subject: [PATCH 54/59] feat: [google-cloud-backupdr] Client library for the backupvault api is added (#13110) - [ ] Regenerate this pull request now. fix!: Remove visibility of unneeded InitiateBackup RPC fix!: Remove visibility of unneeded AbandonBackup RPC fix!: Remove visibility of unneeded FinalizeBackup RPC fix!: Remove visibility of unneeded RemoveDataSource RPC fix!: Remove visibility of unneeded SetInternalStatus RPC PiperOrigin-RevId: 683196317 Source-Link: https://github.com/googleapis/googleapis/commit/c532f355b2bae18fdff19ced316897433de5f093 Source-Link: https://github.com/googleapis/googleapis-gen/commit/fd43f5733b002358fa679623f6099aafb6660c8e Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJhY2t1cGRyLy5Pd2xCb3QueWFtbCIsImgiOiJmZDQzZjU3MzNiMDAyMzU4ZmE2Nzk2MjNmNjA5OWFhZmI2NjYwYzhlIn0= BEGIN_NESTED_COMMIT feat: [google-cloud-backupdr] Client library for the backupvault api is added feat: Add backupplan proto feat: Add backupplanassociation proto feat: Add backupvault_ba proto feat: Add backupvault_gce proto docs: A comment for field `oauth2_client_id` in message `.google.cloud.backupdr.v1.ManagementServer` is changed docs: A comment for field `parent` in message `.google.cloud.backupdr.v1.ListManagementServersRequest` is changed docs: A comment for field `management_servers` in message `.google.cloud.backupdr.v1.ListManagementServersResponse` is changed docs: A comment for field `name` in message `.google.cloud.backupdr.v1.GetManagementServerRequest` is changed docs: A comment for field `parent` in message `.google.cloud.backupdr.v1.CreateManagementServerRequest` is changed docs: A comment for field `requested_cancellation` in message 
`.google.cloud.backupdr.v1.OperationMetadata` is changed PiperOrigin-RevId: 678800741 Source-Link: https://github.com/googleapis/googleapis/commit/d36e288fc56ace0443c96ee1e385529c4ec4198c Source-Link: https://github.com/googleapis/googleapis-gen/commit/fba9d225b43dcf7361006491810485cdf1b57cdb Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJhY2t1cGRyLy5Pd2xCb3QueWFtbCIsImgiOiJmYmE5ZDIyNWI0M2RjZjczNjEwMDY0OTE4MTA0ODVjZGYxYjU3Y2RiIn0= END_NESTED_COMMIT BEGIN_COMMIT_OVERRIDE feat: [google-cloud-backupdr] Client library for the backupvault api is added feat: Add backupplan proto feat: Add backupplanassociation proto feat: Add backupvault_ba proto feat: Add backupvault_gce proto docs: A comment for field `oauth2_client_id` in message `.google.cloud.backupdr.v1.ManagementServer` is changed docs: A comment for field `parent` in message `.google.cloud.backupdr.v1.ListManagementServersRequest` is changed docs: A comment for field `management_servers` in message `.google.cloud.backupdr.v1.ListManagementServersResponse` is changed docs: A comment for field `name` in message `.google.cloud.backupdr.v1.GetManagementServerRequest` is changed docs: A comment for field `parent` in message `.google.cloud.backupdr.v1.CreateManagementServerRequest` is changed docs: A comment for field `requested_cancellation` in message `.google.cloud.backupdr.v1.OperationMetadata` is changed END_COMMIT_OVERRIDE --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google/cloud/backupdr/__init__.py | 166 + .../google/cloud/backupdr_v1/__init__.py | 164 + .../cloud/backupdr_v1/gapic_metadata.json | 345 + .../services/backup_dr/async_client.py | 2964 ++- .../backupdr_v1/services/backup_dr/client.py | 3011 ++- .../backupdr_v1/services/backup_dr/pagers.py | 925 +- .../services/backup_dr/transports/base.py | 430 +- .../services/backup_dr/transports/grpc.py | 638 +- .../backup_dr/transports/grpc_asyncio.py | 862 +- .../services/backup_dr/transports/rest.py | 3645 ++- 
.../cloud/backupdr_v1/types/__init__.py | 164 + .../cloud/backupdr_v1/types/backupdr.py | 37 +- .../cloud/backupdr_v1/types/backupplan.py | 644 + .../types/backupplanassociation.py | 454 + .../cloud/backupdr_v1/types/backupvault.py | 2065 ++ .../cloud/backupdr_v1/types/backupvault_ba.py | 87 + .../backupdr_v1/types/backupvault_gce.py | 1991 ++ ...dr_create_backup_plan_association_async.py | 62 + ..._dr_create_backup_plan_association_sync.py | 62 + ...ated_backup_dr_create_backup_plan_async.py | 68 + ...rated_backup_dr_create_backup_plan_sync.py | 68 + ...ted_backup_dr_create_backup_vault_async.py | 57 + ...ated_backup_dr_create_backup_vault_sync.py | 57 + ...generated_backup_dr_delete_backup_async.py | 56 + ...dr_delete_backup_plan_association_async.py | 56 + ..._dr_delete_backup_plan_association_sync.py | 56 + ...ated_backup_dr_delete_backup_plan_async.py | 56 + ...rated_backup_dr_delete_backup_plan_sync.py | 56 + ..._generated_backup_dr_delete_backup_sync.py | 56 + ...ted_backup_dr_delete_backup_vault_async.py | 56 + ...ated_backup_dr_delete_backup_vault_sync.py | 56 + ...kup_dr_fetch_usable_backup_vaults_async.py | 53 + ...ckup_dr_fetch_usable_backup_vaults_sync.py | 53 + ...v1_generated_backup_dr_get_backup_async.py | 52 + ...up_dr_get_backup_plan_association_async.py | 52 + ...kup_dr_get_backup_plan_association_sync.py | 52 + ...nerated_backup_dr_get_backup_plan_async.py | 52 + ...enerated_backup_dr_get_backup_plan_sync.py | 52 + ..._v1_generated_backup_dr_get_backup_sync.py | 52 + ...erated_backup_dr_get_backup_vault_async.py | 52 + ...nerated_backup_dr_get_backup_vault_sync.py | 52 + ...nerated_backup_dr_get_data_source_async.py | 52 + ...enerated_backup_dr_get_data_source_sync.py | 52 + ..._dr_list_backup_plan_associations_async.py | 53 + ...p_dr_list_backup_plan_associations_sync.py | 53 + ...rated_backup_dr_list_backup_plans_async.py | 53 + ...erated_backup_dr_list_backup_plans_sync.py | 53 + ...ated_backup_dr_list_backup_vaults_async.py | 53 + 
...rated_backup_dr_list_backup_vaults_sync.py | 53 + ..._generated_backup_dr_list_backups_async.py | 53 + ...1_generated_backup_dr_list_backups_sync.py | 53 + ...rated_backup_dr_list_data_sources_async.py | 53 + ...erated_backup_dr_list_data_sources_sync.py | 53 + ...enerated_backup_dr_restore_backup_async.py | 61 + ...generated_backup_dr_restore_backup_sync.py | 61 + ...enerated_backup_dr_trigger_backup_async.py | 57 + ...generated_backup_dr_trigger_backup_sync.py | 57 + ...generated_backup_dr_update_backup_async.py | 55 + ..._generated_backup_dr_update_backup_sync.py | 55 + ...ted_backup_dr_update_backup_vault_async.py | 55 + ...ated_backup_dr_update_backup_vault_sync.py | 55 + ...ated_backup_dr_update_data_source_async.py | 55 + ...rated_backup_dr_update_data_source_sync.py | 55 + ...pet_metadata_google.cloud.backupdr.v1.json | 4061 +++- .../scripts/fixup_backupdr_v1_keywords.py | 23 + .../unit/gapic/backupdr_v1/test_backup_dr.py | 20048 +++++++++++++++- 66 files changed, 44283 insertions(+), 985 deletions(-) create mode 100644 packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py create mode 100644 packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplanassociation.py create mode 100644 packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py create mode 100644 packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_ba.py create mode 100644 packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_gce.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py create mode 100644 
packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py create mode 100644 
packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py create mode 100644 
packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py diff --git 
a/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py index c29b14037f34..5ab4e805a40b 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py @@ -33,6 +33,94 @@ WorkforceIdentityBasedManagementURI, WorkforceIdentityBasedOAuth2ClientID, ) +from google.cloud.backupdr_v1.types.backupplan import ( + BackupPlan, + BackupRule, + BackupWindow, + CreateBackupPlanRequest, + DeleteBackupPlanRequest, + GetBackupPlanRequest, + ListBackupPlansRequest, + ListBackupPlansResponse, + StandardSchedule, + WeekDayOfMonth, +) +from google.cloud.backupdr_v1.types.backupplanassociation import ( + BackupPlanAssociation, + CreateBackupPlanAssociationRequest, + DeleteBackupPlanAssociationRequest, + GetBackupPlanAssociationRequest, + ListBackupPlanAssociationsRequest, + ListBackupPlanAssociationsResponse, + RuleConfigInfo, + TriggerBackupRequest, +) +from google.cloud.backupdr_v1.types.backupvault import ( + Backup, + BackupApplianceBackupConfig, + BackupApplianceLockInfo, + BackupConfigInfo, + BackupConfigState, + BackupLock, + BackupVault, + BackupVaultView, + BackupView, + CreateBackupVaultRequest, + DataSource, + DataSourceBackupApplianceApplication, + DataSourceGcpResource, + DeleteBackupRequest, + DeleteBackupVaultRequest, + FetchUsableBackupVaultsRequest, + FetchUsableBackupVaultsResponse, + GcpBackupConfig, + GcpResource, + GetBackupRequest, + GetBackupVaultRequest, + GetDataSourceRequest, + ListBackupsRequest, + ListBackupsResponse, + ListBackupVaultsRequest, + ListBackupVaultsResponse, + ListDataSourcesRequest, + ListDataSourcesResponse, + RestoreBackupRequest, + RestoreBackupResponse, + ServiceLockInfo, + TargetResource, + UpdateBackupRequest, + UpdateBackupVaultRequest, + UpdateDataSourceRequest, +) +from google.cloud.backupdr_v1.types.backupvault_ba import ( + BackupApplianceBackupProperties, +) +from 
google.cloud.backupdr_v1.types.backupvault_gce import ( + AcceleratorConfig, + AccessConfig, + AdvancedMachineFeatures, + AliasIpRange, + AllocationAffinity, + AttachedDisk, + ComputeInstanceBackupProperties, + ComputeInstanceDataSourceProperties, + ComputeInstanceRestoreProperties, + ComputeInstanceTargetEnvironment, + ConfidentialInstanceConfig, + CustomerEncryptionKey, + DisplayDevice, + Entry, + GuestOsFeature, + InstanceParams, + KeyRevocationActionType, + Metadata, + NetworkInterface, + NetworkPerformanceConfig, + Scheduling, + SchedulingDuration, + ServiceAccount, + Tags, +) __all__ = ( "BackupDRClient", @@ -48,4 +136,82 @@ "OperationMetadata", "WorkforceIdentityBasedManagementURI", "WorkforceIdentityBasedOAuth2ClientID", + "BackupPlan", + "BackupRule", + "BackupWindow", + "CreateBackupPlanRequest", + "DeleteBackupPlanRequest", + "GetBackupPlanRequest", + "ListBackupPlansRequest", + "ListBackupPlansResponse", + "StandardSchedule", + "WeekDayOfMonth", + "BackupPlanAssociation", + "CreateBackupPlanAssociationRequest", + "DeleteBackupPlanAssociationRequest", + "GetBackupPlanAssociationRequest", + "ListBackupPlanAssociationsRequest", + "ListBackupPlanAssociationsResponse", + "RuleConfigInfo", + "TriggerBackupRequest", + "Backup", + "BackupApplianceBackupConfig", + "BackupApplianceLockInfo", + "BackupConfigInfo", + "BackupLock", + "BackupVault", + "CreateBackupVaultRequest", + "DataSource", + "DataSourceBackupApplianceApplication", + "DataSourceGcpResource", + "DeleteBackupRequest", + "DeleteBackupVaultRequest", + "FetchUsableBackupVaultsRequest", + "FetchUsableBackupVaultsResponse", + "GcpBackupConfig", + "GcpResource", + "GetBackupRequest", + "GetBackupVaultRequest", + "GetDataSourceRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "RestoreBackupRequest", + "RestoreBackupResponse", + "ServiceLockInfo", + "TargetResource", + 
"UpdateBackupRequest", + "UpdateBackupVaultRequest", + "UpdateDataSourceRequest", + "BackupConfigState", + "BackupVaultView", + "BackupView", + "BackupApplianceBackupProperties", + "AcceleratorConfig", + "AccessConfig", + "AdvancedMachineFeatures", + "AliasIpRange", + "AllocationAffinity", + "AttachedDisk", + "ComputeInstanceBackupProperties", + "ComputeInstanceDataSourceProperties", + "ComputeInstanceRestoreProperties", + "ComputeInstanceTargetEnvironment", + "ConfidentialInstanceConfig", + "CustomerEncryptionKey", + "DisplayDevice", + "Entry", + "GuestOsFeature", + "InstanceParams", + "Metadata", + "NetworkInterface", + "NetworkPerformanceConfig", + "Scheduling", + "SchedulingDuration", + "ServiceAccount", + "Tags", + "KeyRevocationActionType", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py index a2dc2b97f601..eddcfa53658f 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py @@ -32,19 +32,183 @@ WorkforceIdentityBasedManagementURI, WorkforceIdentityBasedOAuth2ClientID, ) +from .types.backupplan import ( + BackupPlan, + BackupRule, + BackupWindow, + CreateBackupPlanRequest, + DeleteBackupPlanRequest, + GetBackupPlanRequest, + ListBackupPlansRequest, + ListBackupPlansResponse, + StandardSchedule, + WeekDayOfMonth, +) +from .types.backupplanassociation import ( + BackupPlanAssociation, + CreateBackupPlanAssociationRequest, + DeleteBackupPlanAssociationRequest, + GetBackupPlanAssociationRequest, + ListBackupPlanAssociationsRequest, + ListBackupPlanAssociationsResponse, + RuleConfigInfo, + TriggerBackupRequest, +) +from .types.backupvault import ( + Backup, + BackupApplianceBackupConfig, + BackupApplianceLockInfo, + BackupConfigInfo, + BackupConfigState, + BackupLock, + BackupVault, + BackupVaultView, + BackupView, + CreateBackupVaultRequest, + DataSource, + 
DataSourceBackupApplianceApplication, + DataSourceGcpResource, + DeleteBackupRequest, + DeleteBackupVaultRequest, + FetchUsableBackupVaultsRequest, + FetchUsableBackupVaultsResponse, + GcpBackupConfig, + GcpResource, + GetBackupRequest, + GetBackupVaultRequest, + GetDataSourceRequest, + ListBackupsRequest, + ListBackupsResponse, + ListBackupVaultsRequest, + ListBackupVaultsResponse, + ListDataSourcesRequest, + ListDataSourcesResponse, + RestoreBackupRequest, + RestoreBackupResponse, + ServiceLockInfo, + TargetResource, + UpdateBackupRequest, + UpdateBackupVaultRequest, + UpdateDataSourceRequest, +) +from .types.backupvault_ba import BackupApplianceBackupProperties +from .types.backupvault_gce import ( + AcceleratorConfig, + AccessConfig, + AdvancedMachineFeatures, + AliasIpRange, + AllocationAffinity, + AttachedDisk, + ComputeInstanceBackupProperties, + ComputeInstanceDataSourceProperties, + ComputeInstanceRestoreProperties, + ComputeInstanceTargetEnvironment, + ConfidentialInstanceConfig, + CustomerEncryptionKey, + DisplayDevice, + Entry, + GuestOsFeature, + InstanceParams, + KeyRevocationActionType, + Metadata, + NetworkInterface, + NetworkPerformanceConfig, + Scheduling, + SchedulingDuration, + ServiceAccount, + Tags, +) __all__ = ( "BackupDRAsyncClient", + "AcceleratorConfig", + "AccessConfig", + "AdvancedMachineFeatures", + "AliasIpRange", + "AllocationAffinity", + "AttachedDisk", + "Backup", + "BackupApplianceBackupConfig", + "BackupApplianceBackupProperties", + "BackupApplianceLockInfo", + "BackupConfigInfo", + "BackupConfigState", "BackupDRClient", + "BackupLock", + "BackupPlan", + "BackupPlanAssociation", + "BackupRule", + "BackupVault", + "BackupVaultView", + "BackupView", + "BackupWindow", + "ComputeInstanceBackupProperties", + "ComputeInstanceDataSourceProperties", + "ComputeInstanceRestoreProperties", + "ComputeInstanceTargetEnvironment", + "ConfidentialInstanceConfig", + "CreateBackupPlanAssociationRequest", + "CreateBackupPlanRequest", + 
"CreateBackupVaultRequest", "CreateManagementServerRequest", + "CustomerEncryptionKey", + "DataSource", + "DataSourceBackupApplianceApplication", + "DataSourceGcpResource", + "DeleteBackupPlanAssociationRequest", + "DeleteBackupPlanRequest", + "DeleteBackupRequest", + "DeleteBackupVaultRequest", "DeleteManagementServerRequest", + "DisplayDevice", + "Entry", + "FetchUsableBackupVaultsRequest", + "FetchUsableBackupVaultsResponse", + "GcpBackupConfig", + "GcpResource", + "GetBackupPlanAssociationRequest", + "GetBackupPlanRequest", + "GetBackupRequest", + "GetBackupVaultRequest", + "GetDataSourceRequest", "GetManagementServerRequest", + "GuestOsFeature", + "InstanceParams", + "KeyRevocationActionType", + "ListBackupPlanAssociationsRequest", + "ListBackupPlanAssociationsResponse", + "ListBackupPlansRequest", + "ListBackupPlansResponse", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "ListBackupsRequest", + "ListBackupsResponse", + "ListDataSourcesRequest", + "ListDataSourcesResponse", "ListManagementServersRequest", "ListManagementServersResponse", "ManagementServer", "ManagementURI", + "Metadata", "NetworkConfig", + "NetworkInterface", + "NetworkPerformanceConfig", "OperationMetadata", + "RestoreBackupRequest", + "RestoreBackupResponse", + "RuleConfigInfo", + "Scheduling", + "SchedulingDuration", + "ServiceAccount", + "ServiceLockInfo", + "StandardSchedule", + "Tags", + "TargetResource", + "TriggerBackupRequest", + "UpdateBackupRequest", + "UpdateBackupVaultRequest", + "UpdateDataSourceRequest", + "WeekDayOfMonth", "WorkforceIdentityBasedManagementURI", "WorkforceIdentityBasedOAuth2ClientID", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json index 4a9d58bb8dad..902530688c39 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json +++ 
b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json @@ -10,75 +10,420 @@ "grpc": { "libraryClient": "BackupDRClient", "rpcs": { + "CreateBackupPlan": { + "methods": [ + "create_backup_plan" + ] + }, + "CreateBackupPlanAssociation": { + "methods": [ + "create_backup_plan_association" + ] + }, + "CreateBackupVault": { + "methods": [ + "create_backup_vault" + ] + }, "CreateManagementServer": { "methods": [ "create_management_server" ] }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupPlan": { + "methods": [ + "delete_backup_plan" + ] + }, + "DeleteBackupPlanAssociation": { + "methods": [ + "delete_backup_plan_association" + ] + }, + "DeleteBackupVault": { + "methods": [ + "delete_backup_vault" + ] + }, "DeleteManagementServer": { "methods": [ "delete_management_server" ] }, + "FetchUsableBackupVaults": { + "methods": [ + "fetch_usable_backup_vaults" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupPlan": { + "methods": [ + "get_backup_plan" + ] + }, + "GetBackupPlanAssociation": { + "methods": [ + "get_backup_plan_association" + ] + }, + "GetBackupVault": { + "methods": [ + "get_backup_vault" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, "GetManagementServer": { "methods": [ "get_management_server" ] }, + "ListBackupPlanAssociations": { + "methods": [ + "list_backup_plan_associations" + ] + }, + "ListBackupPlans": { + "methods": [ + "list_backup_plans" + ] + }, + "ListBackupVaults": { + "methods": [ + "list_backup_vaults" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, "ListManagementServers": { "methods": [ "list_management_servers" ] + }, + "RestoreBackup": { + "methods": [ + "restore_backup" + ] + }, + "TriggerBackup": { + "methods": [ + "trigger_backup" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupVault": { + "methods": [ + 
"update_backup_vault" + ] + }, + "UpdateDataSource": { + "methods": [ + "update_data_source" + ] } } }, "grpc-async": { "libraryClient": "BackupDRAsyncClient", "rpcs": { + "CreateBackupPlan": { + "methods": [ + "create_backup_plan" + ] + }, + "CreateBackupPlanAssociation": { + "methods": [ + "create_backup_plan_association" + ] + }, + "CreateBackupVault": { + "methods": [ + "create_backup_vault" + ] + }, "CreateManagementServer": { "methods": [ "create_management_server" ] }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupPlan": { + "methods": [ + "delete_backup_plan" + ] + }, + "DeleteBackupPlanAssociation": { + "methods": [ + "delete_backup_plan_association" + ] + }, + "DeleteBackupVault": { + "methods": [ + "delete_backup_vault" + ] + }, "DeleteManagementServer": { "methods": [ "delete_management_server" ] }, + "FetchUsableBackupVaults": { + "methods": [ + "fetch_usable_backup_vaults" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupPlan": { + "methods": [ + "get_backup_plan" + ] + }, + "GetBackupPlanAssociation": { + "methods": [ + "get_backup_plan_association" + ] + }, + "GetBackupVault": { + "methods": [ + "get_backup_vault" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, "GetManagementServer": { "methods": [ "get_management_server" ] }, + "ListBackupPlanAssociations": { + "methods": [ + "list_backup_plan_associations" + ] + }, + "ListBackupPlans": { + "methods": [ + "list_backup_plans" + ] + }, + "ListBackupVaults": { + "methods": [ + "list_backup_vaults" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, "ListManagementServers": { "methods": [ "list_management_servers" ] + }, + "RestoreBackup": { + "methods": [ + "restore_backup" + ] + }, + "TriggerBackup": { + "methods": [ + "trigger_backup" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupVault": { + 
"methods": [ + "update_backup_vault" + ] + }, + "UpdateDataSource": { + "methods": [ + "update_data_source" + ] } } }, "rest": { "libraryClient": "BackupDRClient", "rpcs": { + "CreateBackupPlan": { + "methods": [ + "create_backup_plan" + ] + }, + "CreateBackupPlanAssociation": { + "methods": [ + "create_backup_plan_association" + ] + }, + "CreateBackupVault": { + "methods": [ + "create_backup_vault" + ] + }, "CreateManagementServer": { "methods": [ "create_management_server" ] }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupPlan": { + "methods": [ + "delete_backup_plan" + ] + }, + "DeleteBackupPlanAssociation": { + "methods": [ + "delete_backup_plan_association" + ] + }, + "DeleteBackupVault": { + "methods": [ + "delete_backup_vault" + ] + }, "DeleteManagementServer": { "methods": [ "delete_management_server" ] }, + "FetchUsableBackupVaults": { + "methods": [ + "fetch_usable_backup_vaults" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupPlan": { + "methods": [ + "get_backup_plan" + ] + }, + "GetBackupPlanAssociation": { + "methods": [ + "get_backup_plan_association" + ] + }, + "GetBackupVault": { + "methods": [ + "get_backup_vault" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, "GetManagementServer": { "methods": [ "get_management_server" ] }, + "ListBackupPlanAssociations": { + "methods": [ + "list_backup_plan_associations" + ] + }, + "ListBackupPlans": { + "methods": [ + "list_backup_plans" + ] + }, + "ListBackupVaults": { + "methods": [ + "list_backup_vaults" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, "ListManagementServers": { "methods": [ "list_management_servers" ] + }, + "RestoreBackup": { + "methods": [ + "restore_backup" + ] + }, + "TriggerBackup": { + "methods": [ + "trigger_backup" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupVault": 
{ + "methods": [ + "update_backup_vault" + ] + }, + "UpdateDataSource": { + "methods": [ + "update_data_source" + ] } } } diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py index 33ea84a57365..ae020d8602f6 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py @@ -48,12 +48,21 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore from google.cloud.backupdr_v1.services.backup_dr import pagers -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, + backupvault_ba, + backupvault_gce, +) from .client import BackupDRClient from .transports.base import DEFAULT_CLIENT_INFO, BackupDRTransport @@ -72,6 +81,20 @@ class BackupDRAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = BackupDRClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = BackupDRClient._DEFAULT_UNIVERSE + backup_path = staticmethod(BackupDRClient.backup_path) + parse_backup_path = staticmethod(BackupDRClient.parse_backup_path) + backup_plan_path = staticmethod(BackupDRClient.backup_plan_path) + parse_backup_plan_path = staticmethod(BackupDRClient.parse_backup_plan_path) + backup_plan_association_path = staticmethod( + BackupDRClient.backup_plan_association_path + ) + parse_backup_plan_association_path = staticmethod( + 
BackupDRClient.parse_backup_plan_association_path + ) + backup_vault_path = staticmethod(BackupDRClient.backup_vault_path) + parse_backup_vault_path = staticmethod(BackupDRClient.parse_backup_vault_path) + data_source_path = staticmethod(BackupDRClient.data_source_path) + parse_data_source_path = staticmethod(BackupDRClient.parse_data_source_path) management_server_path = staticmethod(BackupDRClient.management_server_path) parse_management_server_path = staticmethod( BackupDRClient.parse_management_server_path @@ -304,10 +327,10 @@ async def sample_list_management_servers(): parent (:class:`str`): Required. The project and location for which to retrieve management servers information, in the format - ``projects/{project_id}/locations/{location}``. In Cloud - BackupDR, locations map to GCP regions, for example - **us-central1**. To retrieve management servers for all - locations, use "-" for the ``{location}`` value. + 'projects/{project_id}/locations/{location}'. In Cloud + BackupDR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve management servers + for all locations, use "-" for the '{location}' value. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -428,7 +451,7 @@ async def sample_get_management_server(): name (:class:`str`): Required. Name of the management server resource name, in the format - ``projects/{project_id}/locations/{location}/managementServers/{resource_name}`` + 'projects/{project_id}/locations/{location}/managementServers/{resource_name}' This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -542,10 +565,9 @@ async def sample_create_management_server(): management server instance. parent (:class:`str`): Required. The management server project and location in - the format - ``projects/{project_id}/locations/{location}``. In Cloud - Backup and DR locations map to GCP regions, for example - **us-central1**. 
+ the format 'projects/{project_id}/locations/{location}'. + In Cloud Backup and DR locations map to Google Cloud + regions, for example **us-central1**. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -766,6 +788,2928 @@ async def sample_delete_management_server(): # Done; return the response. return response + async def create_backup_vault( + self, + request: Optional[Union[backupvault.CreateBackupVaultRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_vault: Optional[backupvault.BackupVault] = None, + backup_vault_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new BackupVault in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_create_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Make the request + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.CreateBackupVaultRequest, dict]]): + The request object. Message for creating a BackupVault. 
+ parent (:class:`str`): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault (:class:`google.cloud.backupdr_v1.types.BackupVault`): + Required. The resource being created + This corresponds to the ``backup_vault`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault_id (:class:`str`): + Required. ID of the requesting object If auto-generating + ID server-side, remove this field and backup_vault_id + from the method_signature of Create RPC + + This corresponds to the ``backup_vault_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.BackupVault` + Message describing a BackupVault object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_vault, backup_vault_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, backupvault.CreateBackupVaultRequest): + request = backupvault.CreateBackupVaultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_vault is not None: + request.backup_vault = backup_vault + if backup_vault_id is not None: + request.backup_vault_id = backup_vault_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_backup_vault + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.BackupVault, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_backup_vaults( + self, + request: Optional[Union[backupvault.ListBackupVaultsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupVaultsAsyncPager: + r"""Lists BackupVaults in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_list_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_vaults(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupVaultsRequest, dict]]): + The request object. Request message for listing + backupvault stores. + parent (:class:`str`): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve backupvault + stores for all locations, use "-" for the '{location}' + value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsAsyncPager: + Response message for listing + BackupVaults. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListBackupVaultsRequest): + request = backupvault.ListBackupVaultsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_backup_vaults + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupVaultsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def fetch_usable_backup_vaults( + self, + request: Optional[ + Union[backupvault.FetchUsableBackupVaultsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchUsableBackupVaultsAsyncPager: + r"""FetchUsableBackupVaults lists usable BackupVaults in + a given project and location. Usable BackupVault are the + ones that user has backupdr.backupVaults.get permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_fetch_usable_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchUsableBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.fetch_usable_backup_vaults(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest, dict]]): + The request object. Request message for fetching usable + BackupVaults. + parent (:class:`str`): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve backupvault + stores for all locations, use "-" for the '{location}' + value. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsAsyncPager: + Response message for fetching usable + BackupVaults. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.FetchUsableBackupVaultsRequest): + request = backupvault.FetchUsableBackupVaultsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.fetch_usable_backup_vaults + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.FetchUsableBackupVaultsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_backup_vault( + self, + request: Optional[Union[backupvault.GetBackupVaultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.BackupVault: + r"""Gets details of a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupVaultRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_vault(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupVaultRequest, dict]]): + The request object. Request message for getting a + BackupVault. + name (:class:`str`): + Required. 
Name of the backupvault store resource name, + in the format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}' + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupVault: + Message describing a BackupVault + object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.GetBackupVaultRequest): + request = backupvault.GetBackupVaultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_backup_vault + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_backup_vault( + self, + request: Optional[Union[backupvault.UpdateBackupVaultRequest, dict]] = None, + *, + backup_vault: Optional[backupvault.BackupVault] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the settings of a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_update_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupVaultRequest( + ) + + # Make the request + operation = client.update_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.UpdateBackupVaultRequest, dict]]): + The request object. Request message for updating a + BackupVault. + backup_vault (:class:`google.cloud.backupdr_v1.types.BackupVault`): + Required. The resource being updated + This corresponds to the ``backup_vault`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. 
Field mask is used to specify the fields to be + overwritten in the BackupVault resource by the update. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.BackupVault` + Message describing a BackupVault object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup_vault, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.UpdateBackupVaultRequest): + request = backupvault.UpdateBackupVaultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_vault is not None: + request.backup_vault = backup_vault + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
    async def delete_backup_vault(
        self,
        request: Optional[Union[backupvault.DeleteBackupVaultRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Deletes a BackupVault.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import backupdr_v1

            async def sample_delete_backup_vault():
                # Create a client
                client = backupdr_v1.BackupDRAsyncClient()

                # Initialize request argument(s)
                request = backupdr_v1.DeleteBackupVaultRequest(
                    name="name_value",
                )

                # Make the request
                operation = client.delete_backup_vault(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupVaultRequest, dict]]):
                The request object. Message for deleting a BackupVault.
            name (:class:`str`):
                Required. Name of the resource.
                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
                empty messages in your APIs. A typical example is to
                use it as the request or the response type of an API
                method. For instance:

                    service Foo {
                        rpc Bar(google.protobuf.Empty) returns
                        (google.protobuf.Empty);

                    }

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, backupvault.DeleteBackupVaultRequest):
            request = backupvault.DeleteBackupVaultRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._client._transport._wrapped_methods[
            self._client._transport.delete_backup_vault
        ]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            empty_pb2.Empty,
            metadata_type=backupdr.OperationMetadata,
        )

        # Done; return the response.
        return response
    async def list_data_sources(
        self,
        request: Optional[Union[backupvault.ListDataSourcesRequest, dict]] = None,
        *,
        parent: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> pagers.ListDataSourcesAsyncPager:
        r"""Lists DataSources in a given project and location.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import backupdr_v1

            async def sample_list_data_sources():
                # Create a client
                client = backupdr_v1.BackupDRAsyncClient()

                # Initialize request argument(s)
                request = backupdr_v1.ListDataSourcesRequest(
                    parent="parent_value",
                )

                # Make the request
                page_result = client.list_data_sources(request=request)

                # Handle the response
                async for response in page_result:
                    print(response)

        Args:
            request (Optional[Union[google.cloud.backupdr_v1.types.ListDataSourcesRequest, dict]]):
                The request object. Request message for listing
                DataSources.
            parent (:class:`str`):
                Required. The project and location for which to retrieve
                data sources information, in the format
                'projects/{project_id}/locations/{location}'. In Cloud
                Backup and DR, locations map to Google Cloud regions,
                for example **us-central1**. To retrieve data sources
                for all locations, use "-" for the '{location}' value.

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesAsyncPager:
                Response message for listing
                DataSources.
                Iterating over this object will yield
                results and resolve additional pages
                automatically.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, backupvault.ListDataSourcesRequest):
            request = backupvault.ListDataSourcesRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._client._transport._wrapped_methods[
            self._client._transport.list_data_sources
        ]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__aiter__` convenience method.
        response = pagers.ListDataSourcesAsyncPager(
            method=rpc,
            request=request,
            response=response,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    async def get_data_source(
        self,
        request: Optional[Union[backupvault.GetDataSourceRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> backupvault.DataSource:
        r"""Gets details of a DataSource.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import backupdr_v1

            async def sample_get_data_source():
                # Create a client
                client = backupdr_v1.BackupDRAsyncClient()

                # Initialize request argument(s)
                request = backupdr_v1.GetDataSourceRequest(
                    name="name_value",
                )

                # Make the request
                response = await client.get_data_source(request=request)

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.backupdr_v1.types.GetDataSourceRequest, dict]]):
                The request object. Request message for getting a
                DataSource instance.
            name (:class:`str`):
                Required. Name of the data source resource, in the
                format
                'projects/{project_id}/locations/{location}/backupVaults/{resource_name}/dataSources/{resource_name}'

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.backupdr_v1.types.DataSource:
                Message describing a DataSource
                object. Datasource object used to
                represent Datasource details for both
                admin and basic view.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, backupvault.GetDataSourceRequest):
            request = backupvault.GetDataSourceRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._client._transport._wrapped_methods[
            self._client._transport.get_data_source
        ]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    async def update_data_source(
        self,
        request: Optional[Union[backupvault.UpdateDataSourceRequest, dict]] = None,
        *,
        data_source: Optional[backupvault.DataSource] = None,
        update_mask: Optional[field_mask_pb2.FieldMask] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Updates the settings of a DataSource.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import backupdr_v1

            async def sample_update_data_source():
                # Create a client
                client = backupdr_v1.BackupDRAsyncClient()

                # Initialize request argument(s)
                request = backupdr_v1.UpdateDataSourceRequest(
                )

                # Make the request
                operation = client.update_data_source(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.backupdr_v1.types.UpdateDataSourceRequest, dict]]):
                The request object. Request message for updating a data
                source instance.
            data_source (:class:`google.cloud.backupdr_v1.types.DataSource`):
                Required. The resource being updated.
                This corresponds to the ``data_source`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
                Required. Field mask is used to specify the fields to be
                overwritten in the DataSource resource by the update.
                The fields specified in the update_mask are relative to
                the resource, not the full request. A field will be
                overwritten if it is in the mask. If the user does not
                provide a mask then the request will fail.

                This corresponds to the ``update_mask`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be :class:`google.cloud.backupdr_v1.types.DataSource` Message describing a DataSource object.
                Datasource object used to represent Datasource
                details for both admin and basic view.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([data_source, update_mask])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, backupvault.UpdateDataSourceRequest):
            request = backupvault.UpdateDataSourceRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if data_source is not None:
            request.data_source = data_source
        if update_mask is not None:
            request.update_mask = update_mask

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._client._transport._wrapped_methods[
            self._client._transport.update_data_source
        ]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("data_source.name", request.data_source.name),)
            ),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            backupvault.DataSource,
            metadata_type=backupdr.OperationMetadata,
        )

        # Done; return the response.
        return response
    async def list_backups(
        self,
        request: Optional[Union[backupvault.ListBackupsRequest, dict]] = None,
        *,
        parent: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> pagers.ListBackupsAsyncPager:
        r"""Lists Backups in a given project and location.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import backupdr_v1

            async def sample_list_backups():
                # Create a client
                client = backupdr_v1.BackupDRAsyncClient()

                # Initialize request argument(s)
                request = backupdr_v1.ListBackupsRequest(
                    parent="parent_value",
                )

                # Make the request
                page_result = client.list_backups(request=request)

                # Handle the response
                async for response in page_result:
                    print(response)

        Args:
            request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupsRequest, dict]]):
                The request object. Request message for listing Backups.
            parent (:class:`str`):
                Required. The project and location for which to retrieve
                backup information, in the format
                'projects/{project_id}/locations/{location}'. In Cloud
                Backup and DR, locations map to Google Cloud regions,
                for example **us-central1**. To retrieve backup
                information for all locations, use "-" for the
                '{location}' value.

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsAsyncPager:
                Response message for listing Backups.

                Iterating over this object will yield
                results and resolve additional pages
                automatically.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, backupvault.ListBackupsRequest):
            request = backupvault.ListBackupsRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._client._transport._wrapped_methods[
            self._client._transport.list_backups
        ]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__aiter__` convenience method.
        response = pagers.ListBackupsAsyncPager(
            method=rpc,
            request=request,
            response=response,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    async def get_backup(
        self,
        request: Optional[Union[backupvault.GetBackupRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> backupvault.Backup:
        r"""Gets details of a Backup.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import backupdr_v1

            async def sample_get_backup():
                # Create a client
                client = backupdr_v1.BackupDRAsyncClient()

                # Initialize request argument(s)
                request = backupdr_v1.GetBackupRequest(
                    name="name_value",
                )

                # Make the request
                response = await client.get_backup(request=request)

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupRequest, dict]]):
                The request object. Request message for getting a Backup.
            name (:class:`str`):
                Required. Name of the Backup resource, in the
                format
                'projects/{project_id}/locations/{location}/backupVaults/{backupVault}/dataSources/{datasource}/backups/{backup}'

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.backupdr_v1.types.Backup:
                Message describing a Backup object.
        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, backupvault.GetBackupRequest):
            request = backupvault.GetBackupRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._client._transport._wrapped_methods[
            self._client._transport.get_backup
        ]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    async def update_backup(
        self,
        request: Optional[Union[backupvault.UpdateBackupRequest, dict]] = None,
        *,
        backup: Optional[backupvault.Backup] = None,
        update_mask: Optional[field_mask_pb2.FieldMask] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Updates the settings of a Backup.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import backupdr_v1

            async def sample_update_backup():
                # Create a client
                client = backupdr_v1.BackupDRAsyncClient()

                # Initialize request argument(s)
                request = backupdr_v1.UpdateBackupRequest(
                )

                # Make the request
                operation = client.update_backup(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.backupdr_v1.types.UpdateBackupRequest, dict]]):
                The request object. Request message for updating a
                Backup.
            backup (:class:`google.cloud.backupdr_v1.types.Backup`):
                Required. The resource being updated.
                This corresponds to the ``backup`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
                Required. Field mask is used to specify the fields to be
                overwritten in the Backup resource by the update. The
                fields specified in the update_mask are relative to the
                resource, not the full request. A field will be
                overwritten if it is in the mask. If the user does not
                provide a mask then the request will fail.

                This corresponds to the ``update_mask`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.backupdr_v1.types.Backup` Message
                describing a Backup object.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([backup, update_mask])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, backupvault.UpdateBackupRequest):
            request = backupvault.UpdateBackupRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if backup is not None:
            request.backup = backup
        if update_mask is not None:
            request.update_mask = update_mask

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._client._transport._wrapped_methods[
            self._client._transport.update_backup
        ]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("backup.name", request.backup.name),)
            ),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            backupvault.Backup,
            metadata_type=backupdr.OperationMetadata,
        )

        # Done; return the response.
        return response
    async def delete_backup(
        self,
        request: Optional[Union[backupvault.DeleteBackupRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Deletes a Backup.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import backupdr_v1

            async def sample_delete_backup():
                # Create a client
                client = backupdr_v1.BackupDRAsyncClient()

                # Initialize request argument(s)
                request = backupdr_v1.DeleteBackupRequest(
                    name="name_value",
                )

                # Make the request
                operation = client.delete_backup(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupRequest, dict]]):
                The request object. Message for deleting a Backup.
            name (:class:`str`):
                Required. Name of the resource.
                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.backupdr_v1.types.Backup` Message
                describing a Backup object.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, backupvault.DeleteBackupRequest):
            request = backupvault.DeleteBackupRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._client._transport._wrapped_methods[
            self._client._transport.delete_backup
        ]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            backupvault.Backup,
            metadata_type=backupdr.OperationMetadata,
        )

        # Done; return the response.
        return response

    async def restore_backup(
        self,
        request: Optional[Union[backupvault.RestoreBackupRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Restore from a Backup

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import backupdr_v1

            async def sample_restore_backup():
                # Create a client
                client = backupdr_v1.BackupDRAsyncClient()

                # Initialize request argument(s)
                compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment()
                compute_instance_target_environment.project = "project_value"
                compute_instance_target_environment.zone = "zone_value"

                request = backupdr_v1.RestoreBackupRequest(
                    compute_instance_target_environment=compute_instance_target_environment,
                    name="name_value",
                )

                # Make the request
                operation = client.restore_backup(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.backupdr_v1.types.RestoreBackupRequest, dict]]):
                The request object. Request message for restoring from a
                Backup.
            name (:class:`str`):
                Required. The resource name of the Backup instance, in
                the format
                'projects/*/locations/*/backupVaults/*/dataSources/*/backups/*'.
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.RestoreBackupResponse` + Response message for restoring from a Backup. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.RestoreBackupRequest): + request = backupvault.RestoreBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.restore_backup + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.RestoreBackupResponse, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def create_backup_plan( + self, + request: Optional[Union[backupplan.CreateBackupPlanRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_plan: Optional[backupplan.BackupPlan] = None, + backup_plan_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a BackupPlan + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_create_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + backup_plan=backup_plan, + ) + + # Make the request + operation = client.create_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.CreateBackupPlanRequest, dict]]): + The request object. The request message for creating a ``BackupPlan``. + parent (:class:`str`): + Required. The ``BackupPlan`` project and location in the + format ``projects/{project}/locations/{location}``. In + Cloud BackupDR locations map to GCP regions, for example + **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan (:class:`google.cloud.backupdr_v1.types.BackupPlan`): + Required. The ``BackupPlan`` resource object to create. 
+ This corresponds to the ``backup_plan`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ backup_plan_id (:class:`str`):
+ Required. The name of the ``BackupPlan`` to create. The
+ name must be unique for the specified project and
+ location. The name must start with a lowercase letter
+ followed by up to 62 lowercase letters, numbers, or
+ hyphens. Pattern, /[a-z][a-z0-9-]{,62}/.
+
+ This corresponds to the ``backup_plan_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlan` A BackupPlan specifies some common fields, such as description as well
+ as one or more BackupRule messages. Each BackupRule
+ has a retention policy and defines a schedule by
+ which the system is to perform backup workloads.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, backup_plan, backup_plan_id])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, backupplan.CreateBackupPlanRequest): + request = backupplan.CreateBackupPlanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_plan is not None: + request.backup_plan = backup_plan + if backup_plan_id is not None: + request.backup_plan_id = backup_plan_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_backup_plan + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupplan.BackupPlan, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_backup_plan( + self, + request: Optional[Union[backupplan.GetBackupPlanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplan.BackupPlan: + r"""Gets details of a single BackupPlan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_plan(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupPlanRequest, dict]]): + The request object. The request message for getting a ``BackupPlan``. + name (:class:`str`): + Required. The resource name of the ``BackupPlan`` to + retrieve. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupPlan: + A BackupPlan specifies some common fields, such as description as well + as one or more BackupRule messages. Each BackupRule + has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.GetBackupPlanRequest): + request = backupplan.GetBackupPlanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_backup_plan + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_backup_plans( + self, + request: Optional[Union[backupplan.ListBackupPlansRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupPlansAsyncPager: + r"""Lists BackupPlans in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import backupdr_v1
+
+ async def sample_list_backup_plans():
+ # Create a client
+ client = backupdr_v1.BackupDRAsyncClient()
+
+ # Initialize request argument(s)
+ request = backupdr_v1.ListBackupPlansRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_backup_plans(request=request)
+
+ # Handle the response
+ async for response in page_result:
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupPlansRequest, dict]]):
+ The request object. The request message for getting a list of ``BackupPlan``.
+ parent (:class:`str`):
+ Required. The project and location for which to retrieve
+ ``BackupPlans`` information. Format:
+ ``projects/{project}/locations/{location}``. In Cloud
+ BackupDR, locations map to GCP regions, e.g.
+ **us-central1**. To retrieve backup plans for all
+ locations, use "-" for the ``{location}`` value.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansAsyncPager:
+ The response message for getting a list of BackupPlan.
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.ListBackupPlansRequest): + request = backupplan.ListBackupPlansRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_backup_plans + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupPlansAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_backup_plan( + self, + request: Optional[Union[backupplan.DeleteBackupPlanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single BackupPlan. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_delete_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupPlanRequest, dict]]): + The request object. The request message for deleting a ``BackupPlan``. + name (:class:`str`): + Required. The resource name of the ``BackupPlan`` to + delete. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. 
A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.DeleteBackupPlanRequest): + request = backupplan.DeleteBackupPlanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_backup_plan + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def create_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.CreateBackupPlanAssociationRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + backup_plan_association: Optional[ + backupplanassociation.BackupPlanAssociation + ] = None, + backup_plan_association_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a BackupPlanAssociation + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_create_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.create_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest, dict]]): + The request object. 
Request message for creating a backup + plan. + parent (:class:`str`): + Required. The backup plan association project and + location in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR locations map to GCP regions, for example + **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association (:class:`google.cloud.backupdr_v1.types.BackupPlanAssociation`): + Required. The resource being created + This corresponds to the ``backup_plan_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association_id (:class:`str`): + Required. The name of the backup plan + association to create. The name must be + unique for the specified project and + location. + + This corresponds to the ``backup_plan_association_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any( + [parent, backup_plan_association, backup_plan_association_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.CreateBackupPlanAssociationRequest + ): + request = backupplanassociation.CreateBackupPlanAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_plan_association is not None: + request.backup_plan_association = backup_plan_association + if backup_plan_association_id is not None: + request.backup_plan_association_id = backup_plan_association_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupplanassociation.BackupPlanAssociation, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def get_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.GetBackupPlanAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplanassociation.BackupPlanAssociation: + r"""Gets details of a single BackupPlanAssociation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_plan_association(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest, dict]]): + The request object. Request message for getting a + BackupPlanAssociation resource. + name (:class:`str`): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupPlanAssociation: + A BackupPlanAssociation represents a + single BackupPlanAssociation which + contains details like workload, backup + plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.GetBackupPlanAssociationRequest + ): + request = backupplanassociation.GetBackupPlanAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_backup_plan_associations( + self, + request: Optional[ + Union[backupplanassociation.ListBackupPlanAssociationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupPlanAssociationsAsyncPager: + r"""Lists BackupPlanAssociations in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_list_backup_plan_associations(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_associations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest, dict]]): + The request object. Request message for List + BackupPlanAssociation + parent (:class:`str`): + Required. The project and location for which to retrieve + backup Plan Associations information, in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for example + **us-central1**. To retrieve backup plan associations + for all locations, use "-" for the ``{location}`` value. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsAsyncPager: + Response message for List + BackupPlanAssociation + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.ListBackupPlanAssociationsRequest + ): + request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_backup_plan_associations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupPlanAssociationsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.DeleteBackupPlanAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single BackupPlanAssociation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_delete_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest, dict]]): + The request object. 
Request message for deleting a backup + plan association. + name (:class:`str`): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.DeleteBackupPlanAssociationRequest + ): + request = backupplanassociation.DeleteBackupPlanAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def trigger_backup( + self, + request: Optional[ + Union[backupplanassociation.TriggerBackupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + rule_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Triggers a new Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_trigger_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Make the request + operation = client.trigger_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.TriggerBackupRequest, dict]]): + The request object. Request message for triggering a + backup. + name (:class:`str`): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + rule_id (:class:`str`): + Required. backup rule_id for which a backup needs to be + triggered. + + This corresponds to the ``rule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, rule_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplanassociation.TriggerBackupRequest): + request = backupplanassociation.TriggerBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if rule_id is not None: + request.rule_id = rule_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.trigger_backup + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupplanassociation.BackupPlanAssociation, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py index a853cfead99d..b884fc527a39 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py @@ -54,12 +54,21 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore from google.cloud.backupdr_v1.services.backup_dr import pagers -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, + backupvault_ba, + backupvault_gce, +) from .transports.base import DEFAULT_CLIENT_INFO, BackupDRTransport from .transports.grpc import BackupDRGrpcTransport @@ -191,6 +200,126 @@ def transport(self) -> BackupDRTransport: """ return self._transport + @staticmethod + def backup_path( + project: str, + location: str, + backupvault: str, + datasource: str, + backup: str, + ) -> str: + """Returns a fully-qualified backup string.""" + return "projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}/backups/{backup}".format( + project=project, + location=location, + backupvault=backupvault, + datasource=datasource, + backup=backup, + ) + + @staticmethod + def parse_backup_path(path: str) -> Dict[str, str]: + """Parses a backup path into 
its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/backupVaults/(?P.+?)/dataSources/(?P.+?)/backups/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def backup_plan_path( + project: str, + location: str, + backup_plan: str, + ) -> str: + """Returns a fully-qualified backup_plan string.""" + return ( + "projects/{project}/locations/{location}/backupPlans/{backup_plan}".format( + project=project, + location=location, + backup_plan=backup_plan, + ) + ) + + @staticmethod + def parse_backup_plan_path(path: str) -> Dict[str, str]: + """Parses a backup_plan path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/backupPlans/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def backup_plan_association_path( + project: str, + location: str, + backup_plan_association: str, + ) -> str: + """Returns a fully-qualified backup_plan_association string.""" + return "projects/{project}/locations/{location}/backupPlanAssociations/{backup_plan_association}".format( + project=project, + location=location, + backup_plan_association=backup_plan_association, + ) + + @staticmethod + def parse_backup_plan_association_path(path: str) -> Dict[str, str]: + """Parses a backup_plan_association path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/backupPlanAssociations/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def backup_vault_path( + project: str, + location: str, + backupvault: str, + ) -> str: + """Returns a fully-qualified backup_vault string.""" + return ( + "projects/{project}/locations/{location}/backupVaults/{backupvault}".format( + project=project, + location=location, + backupvault=backupvault, + ) + ) + + @staticmethod + def parse_backup_vault_path(path: str) -> Dict[str, str]: + """Parses a backup_vault path into its component segments.""" + m = re.match( + 
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/backupVaults/(?P<backupvault>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def data_source_path( + project: str, + location: str, + backupvault: str, + datasource: str, + ) -> str: + """Returns a fully-qualified data_source string.""" + return "projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}".format( + project=project, + location=location, + backupvault=backupvault, + datasource=datasource, + ) + + @staticmethod + def parse_data_source_path(path: str) -> Dict[str, str]: + """Parses a data_source path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/backupVaults/(?P<backupvault>.+?)/dataSources/(?P<datasource>.+?)$",
Name of the management server resource name, in the format - ``projects/{project_id}/locations/{location}/managementServers/{resource_name}`` + 'projects/{project_id}/locations/{location}/managementServers/{resource_name}' This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -960,10 +1089,9 @@ def sample_create_management_server(): management server instance. parent (str): Required. The management server project and location in - the format - ``projects/{project_id}/locations/{location}``. In Cloud - Backup and DR locations map to GCP regions, for example - **us-central1**. + the format 'projects/{project_id}/locations/{location}'. + In Cloud Backup and DR locations map to Google Cloud + regions, for example **us-central1**. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1178,6 +1306,2869 @@ def sample_delete_management_server(): # Done; return the response. return response + def create_backup_vault( + self, + request: Optional[Union[backupvault.CreateBackupVaultRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_vault: Optional[backupvault.BackupVault] = None, + backup_vault_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new BackupVault in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_create_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Make the request + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.CreateBackupVaultRequest, dict]): + The request object. Message for creating a BackupVault. + parent (str): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault (google.cloud.backupdr_v1.types.BackupVault): + Required. The resource being created + This corresponds to the ``backup_vault`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault_id (str): + Required. ID of the requesting object If auto-generating + ID server-side, remove this field and backup_vault_id + from the method_signature of Create RPC + + This corresponds to the ``backup_vault_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.BackupVault` + Message describing a BackupVault object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_vault, backup_vault_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.CreateBackupVaultRequest): + request = backupvault.CreateBackupVaultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_vault is not None: + request.backup_vault = backup_vault + if backup_vault_id is not None: + request.backup_vault_id = backup_vault_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_backup_vault] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.BackupVault, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def list_backup_vaults( + self, + request: Optional[Union[backupvault.ListBackupVaultsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupVaultsPager: + r"""Lists BackupVaults in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_vaults(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListBackupVaultsRequest, dict]): + The request object. Request message for listing + backupvault stores. + parent (str): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve backupvault + stores for all locations, use "-" for the '{location}' + value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsPager: + Response message for listing + BackupVaults. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListBackupVaultsRequest): + request = backupvault.ListBackupVaultsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backup_vaults] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListBackupVaultsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def fetch_usable_backup_vaults( + self, + request: Optional[ + Union[backupvault.FetchUsableBackupVaultsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchUsableBackupVaultsPager: + r"""FetchUsableBackupVaults lists usable BackupVaults in + a given project and location. Usable BackupVault are the + ones that user has backupdr.backupVaults.get permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_fetch_usable_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchUsableBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.fetch_usable_backup_vaults(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest, dict]): + The request object. Request message for fetching usable + BackupVaults. + parent (str): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. 
In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve backupvault + stores for all locations, use "-" for the '{location}' + value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsPager: + Response message for fetching usable + BackupVaults. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.FetchUsableBackupVaultsRequest): + request = backupvault.FetchUsableBackupVaultsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.fetch_usable_backup_vaults + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
    def get_backup_vault(
        self,
        request: Optional[Union[backupvault.GetBackupVaultRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> backupvault.BackupVault:
        r"""Gets details of a BackupVault.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import backupdr_v1

            def sample_get_backup_vault():
                # Create a client
                client = backupdr_v1.BackupDRClient()

                # Initialize request argument(s)
                request = backupdr_v1.GetBackupVaultRequest(
                    name="name_value",
                )

                # Make the request
                response = client.get_backup_vault(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.backupdr_v1.types.GetBackupVaultRequest, dict]):
                The request object. Request message for getting a
                BackupVault.
            name (str):
                Required. Name of the backupvault store resource, in
                the format
                'projects/{project_id}/locations/{location}/backupVaults/{resource_name}'

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.backupdr_v1.types.BackupVault:
                Message describing a BackupVault
                object.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, backupvault.GetBackupVaultRequest):
            request = backupvault.GetBackupVaultRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if name is not None:
                request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get_backup_vault]

        # Certain fields should be provided within the metadata header;
        # add these here (the ``name`` routing header lets the backend route
        # the call to the correct regional resource).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    def update_backup_vault(
        self,
        request: Optional[Union[backupvault.UpdateBackupVaultRequest, dict]] = None,
        *,
        backup_vault: Optional[backupvault.BackupVault] = None,
        update_mask: Optional[field_mask_pb2.FieldMask] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation.Operation:
        r"""Updates the settings of a BackupVault.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import backupdr_v1

            def sample_update_backup_vault():
                # Create a client
                client = backupdr_v1.BackupDRClient()

                # Initialize request argument(s)
                request = backupdr_v1.UpdateBackupVaultRequest(
                )

                # Make the request
                operation = client.update_backup_vault(request=request)

                print("Waiting for operation to complete...")

                response = operation.result()

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.backupdr_v1.types.UpdateBackupVaultRequest, dict]):
                The request object. Request message for updating a
                BackupVault.
            backup_vault (google.cloud.backupdr_v1.types.BackupVault):
                Required. The resource being updated
                This corresponds to the ``backup_vault`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            update_mask (google.protobuf.field_mask_pb2.FieldMask):
                Required. Field mask is used to specify the fields to be
                overwritten in the BackupVault resource by the update.
                The fields specified in the update_mask are relative to
                the resource, not the full request. A field will be
                overwritten if it is in the mask. If the user does not
                provide a mask then the request will fail.

                This corresponds to the ``update_mask`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation.Operation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.backupdr_v1.types.BackupVault`
                Message describing a BackupVault object.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([backup_vault, update_mask])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, backupvault.UpdateBackupVaultRequest):
            request = backupvault.UpdateBackupVaultRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if backup_vault is not None:
                request.backup_vault = backup_vault
            if update_mask is not None:
                request.update_mask = update_mask

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.update_backup_vault]

        # Certain fields should be provided within the metadata header;
        # add these here. The routing header is keyed on the name of the
        # vault being updated (``backup_vault.name``).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("backup_vault.name", request.backup_vault.name),)
            ),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future; ``operation.result()``
        # will resolve to a ``backupvault.BackupVault``.
        response = operation.from_gapic(
            response,
            self._transport.operations_client,
            backupvault.BackupVault,
            metadata_type=backupdr.OperationMetadata,
        )

        # Done; return the response.
        return response
    def delete_backup_vault(
        self,
        request: Optional[Union[backupvault.DeleteBackupVaultRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation.Operation:
        r"""Deletes a BackupVault.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import backupdr_v1

            def sample_delete_backup_vault():
                # Create a client
                client = backupdr_v1.BackupDRClient()

                # Initialize request argument(s)
                request = backupdr_v1.DeleteBackupVaultRequest(
                    name="name_value",
                )

                # Make the request
                operation = client.delete_backup_vault(request=request)

                print("Waiting for operation to complete...")

                response = operation.result()

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.backupdr_v1.types.DeleteBackupVaultRequest, dict]):
                The request object. Message for deleting a BackupVault.
            name (str):
                Required. Name of the resource.
                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation.Operation:
                An object representing a long-running operation.

                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
                empty messages in your APIs. A typical example is to
                use it as the request or the response type of an API
                method. For instance:

                   service Foo {
                       rpc Bar(google.protobuf.Empty) returns
                       (google.protobuf.Empty);

                   }

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, backupvault.DeleteBackupVaultRequest):
            request = backupvault.DeleteBackupVaultRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if name is not None:
                request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete_backup_vault]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future; the delete has no
        # payload, so ``operation.result()`` resolves to ``empty_pb2.Empty``.
        response = operation.from_gapic(
            response,
            self._transport.operations_client,
            empty_pb2.Empty,
            metadata_type=backupdr.OperationMetadata,
        )

        # Done; return the response.
        return response
    def list_data_sources(
        self,
        request: Optional[Union[backupvault.ListDataSourcesRequest, dict]] = None,
        *,
        parent: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> pagers.ListDataSourcesPager:
        r"""Lists DataSources in a given project and location.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import backupdr_v1

            def sample_list_data_sources():
                # Create a client
                client = backupdr_v1.BackupDRClient()

                # Initialize request argument(s)
                request = backupdr_v1.ListDataSourcesRequest(
                    parent="parent_value",
                )

                # Make the request
                page_result = client.list_data_sources(request=request)

                # Handle the response
                for response in page_result:
                    print(response)

        Args:
            request (Union[google.cloud.backupdr_v1.types.ListDataSourcesRequest, dict]):
                The request object. Request message for listing
                DataSources.
            parent (str):
                Required. The project and location for which to retrieve
                data sources information, in the format
                'projects/{project_id}/locations/{location}'. In Cloud
                Backup and DR, locations map to Google Cloud regions,
                for example **us-central1**. To retrieve data sources
                for all locations, use "-" for the '{location}' value.

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesPager:
                Response message for listing
                DataSources.
                Iterating over this object will yield
                results and resolve additional pages
                automatically.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, backupvault.ListDataSourcesRequest):
            request = backupvault.ListDataSourcesRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if parent is not None:
                request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.list_data_sources]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method. The same retry/timeout/metadata
        # are passed through so subsequent page fetches use them too.
        response = pagers.ListDataSourcesPager(
            method=rpc,
            request=request,
            response=response,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    def get_data_source(
        self,
        request: Optional[Union[backupvault.GetDataSourceRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> backupvault.DataSource:
        r"""Gets details of a DataSource.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import backupdr_v1

            def sample_get_data_source():
                # Create a client
                client = backupdr_v1.BackupDRClient()

                # Initialize request argument(s)
                request = backupdr_v1.GetDataSourceRequest(
                    name="name_value",
                )

                # Make the request
                response = client.get_data_source(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.backupdr_v1.types.GetDataSourceRequest, dict]):
                The request object. Request message for getting a
                DataSource instance.
            name (str):
                Required. Name of the data source resource, in the
                format
                'projects/{project_id}/locations/{location}/backupVaults/{resource_name}/dataSource/{resource_name}'

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.backupdr_v1.types.DataSource:
                Message describing a DataSource
                object. Datasource object used to
                represent Datasource details for both
                admin and basic view.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, backupvault.GetDataSourceRequest):
            request = backupvault.GetDataSourceRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if name is not None:
                request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get_data_source]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    def update_data_source(
        self,
        request: Optional[Union[backupvault.UpdateDataSourceRequest, dict]] = None,
        *,
        data_source: Optional[backupvault.DataSource] = None,
        update_mask: Optional[field_mask_pb2.FieldMask] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation.Operation:
        r"""Updates the settings of a DataSource.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import backupdr_v1

            def sample_update_data_source():
                # Create a client
                client = backupdr_v1.BackupDRClient()

                # Initialize request argument(s)
                request = backupdr_v1.UpdateDataSourceRequest(
                )

                # Make the request
                operation = client.update_data_source(request=request)

                print("Waiting for operation to complete...")

                response = operation.result()

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.backupdr_v1.types.UpdateDataSourceRequest, dict]):
                The request object. Request message for updating a data
                source instance.
            data_source (google.cloud.backupdr_v1.types.DataSource):
                Required. The resource being updated
                This corresponds to the ``data_source`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            update_mask (google.protobuf.field_mask_pb2.FieldMask):
                Required. Field mask is used to specify the fields to be
                overwritten in the DataSource resource by the update.
                The fields specified in the update_mask are relative to
                the resource, not the full request. A field will be
                overwritten if it is in the mask. If the user does not
                provide a mask then the request will fail.

                This corresponds to the ``update_mask`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation.Operation:
                An object representing a long-running operation.

                The result type for the operation will be :class:`google.cloud.backupdr_v1.types.DataSource` Message describing a DataSource object.
                   Datasource object used to represent Datasource
                   details for both admin and basic view.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([data_source, update_mask])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, backupvault.UpdateDataSourceRequest):
            request = backupvault.UpdateDataSourceRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if data_source is not None:
                request.data_source = data_source
            if update_mask is not None:
                request.update_mask = update_mask

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.update_data_source]

        # Certain fields should be provided within the metadata header;
        # add these here. The routing header is keyed on the name of the
        # data source being updated (``data_source.name``).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("data_source.name", request.data_source.name),)
            ),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future; ``operation.result()``
        # will resolve to a ``backupvault.DataSource``.
        response = operation.from_gapic(
            response,
            self._transport.operations_client,
            backupvault.DataSource,
            metadata_type=backupdr.OperationMetadata,
        )

        # Done; return the response.
        return response
    def list_backups(
        self,
        request: Optional[Union[backupvault.ListBackupsRequest, dict]] = None,
        *,
        parent: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> pagers.ListBackupsPager:
        r"""Lists Backups in a given project and location.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import backupdr_v1

            def sample_list_backups():
                # Create a client
                client = backupdr_v1.BackupDRClient()

                # Initialize request argument(s)
                request = backupdr_v1.ListBackupsRequest(
                    parent="parent_value",
                )

                # Make the request
                page_result = client.list_backups(request=request)

                # Handle the response
                for response in page_result:
                    print(response)

        Args:
            request (Union[google.cloud.backupdr_v1.types.ListBackupsRequest, dict]):
                The request object. Request message for listing Backups.
            parent (str):
                Required. The project and location for which to retrieve
                backup information, in the format
                'projects/{project_id}/locations/{location}'. In Cloud
                Backup and DR, locations map to Google Cloud regions,
                for example **us-central1**. To retrieve data sources
                for all locations, use "-" for the '{location}' value.

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsPager:
                Response message for listing Backups.

                Iterating over this object will yield
                results and resolve additional pages
                automatically.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, backupvault.ListBackupsRequest):
            request = backupvault.ListBackupsRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if parent is not None:
                request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.list_backups]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method. The same retry/timeout/metadata
        # are passed through so subsequent page fetches use them too.
        response = pagers.ListBackupsPager(
            method=rpc,
            request=request,
            response=response,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    def get_backup(
        self,
        request: Optional[Union[backupvault.GetBackupRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> backupvault.Backup:
        r"""Gets details of a Backup.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import backupdr_v1

            def sample_get_backup():
                # Create a client
                client = backupdr_v1.BackupDRClient()

                # Initialize request argument(s)
                request = backupdr_v1.GetBackupRequest(
                    name="name_value",
                )

                # Make the request
                response = client.get_backup(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.backupdr_v1.types.GetBackupRequest, dict]):
                The request object. Request message for getting a Backup.
            name (str):
                Required. Name of the backup resource, in the
                format
                'projects/{project_id}/locations/{location}/backupVaults/{backupVault}/dataSources/{datasource}/backups/{backup}'

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.backupdr_v1.types.Backup:
                Message describing a Backup object.
        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, backupvault.GetBackupRequest):
            request = backupvault.GetBackupRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if name is not None:
                request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get_backup]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    def update_backup(
        self,
        request: Optional[Union[backupvault.UpdateBackupRequest, dict]] = None,
        *,
        backup: Optional[backupvault.Backup] = None,
        update_mask: Optional[field_mask_pb2.FieldMask] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation.Operation:
        r"""Updates the settings of a Backup.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import backupdr_v1

            def sample_update_backup():
                # Create a client
                client = backupdr_v1.BackupDRClient()

                # Initialize request argument(s)
                request = backupdr_v1.UpdateBackupRequest(
                )

                # Make the request
                operation = client.update_backup(request=request)

                print("Waiting for operation to complete...")

                response = operation.result()

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.backupdr_v1.types.UpdateBackupRequest, dict]):
                The request object. Request message for updating a
                Backup.
            backup (google.cloud.backupdr_v1.types.Backup):
                Required. The resource being updated
                This corresponds to the ``backup`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            update_mask (google.protobuf.field_mask_pb2.FieldMask):
                Required. Field mask is used to specify the fields to be
                overwritten in the Backup resource by the update. The
                fields specified in the update_mask are relative to the
                resource, not the full request. A field will be
                overwritten if it is in the mask. If the user does not
                provide a mask then the request will fail.

                This corresponds to the ``update_mask`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation.Operation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.backupdr_v1.types.Backup` Message
                describing a Backup object.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([backup, update_mask])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, backupvault.UpdateBackupRequest):
            request = backupvault.UpdateBackupRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if backup is not None:
                request.backup = backup
            if update_mask is not None:
                request.update_mask = update_mask

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.update_backup]

        # Certain fields should be provided within the metadata header;
        # add these here. The routing header is keyed on the name of the
        # backup being updated (``backup.name``).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("backup.name", request.backup.name),)
            ),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future; ``operation.result()``
        # will resolve to a ``backupvault.Backup``.
        response = operation.from_gapic(
            response,
            self._transport.operations_client,
            backupvault.Backup,
            metadata_type=backupdr.OperationMetadata,
        )

        # Done; return the response.
        return response
+ return response + + def delete_backup( + self, + request: Optional[Union[backupvault.DeleteBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_delete_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.DeleteBackupRequest, dict]): + The request object. Message for deleting a Backup. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.Backup` Message + describing a Backup object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.DeleteBackupRequest): + request = backupvault.DeleteBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.Backup, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def restore_backup( + self, + request: Optional[Union[backupvault.RestoreBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Restore from a Backup + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_restore_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment() + compute_instance_target_environment.project = "project_value" + compute_instance_target_environment.zone = "zone_value" + + request = backupdr_v1.RestoreBackupRequest( + compute_instance_target_environment=compute_instance_target_environment, + name="name_value", + ) + + # Make the request + operation = client.restore_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.RestoreBackupRequest, dict]): + The request object. Request message for restoring from a + Backup. + name (str): + Required. The resource name of the Backup instance, in + the format + 'projects/*/locations/*/backupVaults/*/dataSources/*/backups/'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.RestoreBackupResponse` + Response message for restoring from a Backup. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.RestoreBackupRequest): + request = backupvault.RestoreBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restore_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.RestoreBackupResponse, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_backup_plan( + self, + request: Optional[Union[backupplan.CreateBackupPlanRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_plan: Optional[backupplan.BackupPlan] = None, + backup_plan_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create a BackupPlan + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_create_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + backup_plan=backup_plan, + ) + + # Make the 
request + operation = client.create_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.CreateBackupPlanRequest, dict]): + The request object. The request message for creating a ``BackupPlan``. + parent (str): + Required. The ``BackupPlan`` project and location in the + format ``projects/{project}/locations/{location}``. In + Cloud BackupDR locations map to GCP regions, for example + **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan (google.cloud.backupdr_v1.types.BackupPlan): + Required. The ``BackupPlan`` resource object to create. + This corresponds to the ``backup_plan`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_id (str): + Required. The name of the ``BackupPlan`` to create. The + name must be unique for the specified project and + location.The name must start with a lowercase letter + followed by up to 62 lowercase letters, numbers, or + hyphens. Pattern, /[a-z][a-z0-9-]{,62}/. + + This corresponds to the ``backup_plan_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlan` A BackupPlan specifies some common fields, such as description as well + as one or more BackupRule messages. 
Each BackupRule + has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_plan, backup_plan_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.CreateBackupPlanRequest): + request = backupplan.CreateBackupPlanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_plan is not None: + request.backup_plan = backup_plan + if backup_plan_id is not None: + request.backup_plan_id = backup_plan_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_backup_plan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupplan.BackupPlan, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def get_backup_plan( + self, + request: Optional[Union[backupplan.GetBackupPlanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplan.BackupPlan: + r"""Gets details of a single BackupPlan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_get_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_plan(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.GetBackupPlanRequest, dict]): + The request object. The request message for getting a ``BackupPlan``. + name (str): + Required. The resource name of the ``BackupPlan`` to + retrieve. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.backupdr_v1.types.BackupPlan: + A BackupPlan specifies some common fields, such as description as well + as one or more BackupRule messages. Each BackupRule + has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.GetBackupPlanRequest): + request = backupplan.GetBackupPlanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup_plan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_backup_plans( + self, + request: Optional[Union[backupplan.ListBackupPlansRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupPlansPager: + r"""Lists BackupPlans in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_backup_plans(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlansRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plans(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListBackupPlansRequest, dict]): + The request object. The request message for getting a list ``BackupPlan``. + parent (str): + Required. The project and location for which to retrieve + ``BackupPlans`` information. Format: + ``projects/{project}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for e.g. + **us-central1**. To retrieve backup plans for all + locations, use "-" for the ``{location}`` value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansPager: + The response message for getting a list of BackupPlan. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.ListBackupPlansRequest): + request = backupplan.ListBackupPlansRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backup_plans] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListBackupPlansPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_backup_plan( + self, + request: Optional[Union[backupplan.DeleteBackupPlanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single BackupPlan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_delete_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.DeleteBackupPlanRequest, dict]): + The request object. The request message for deleting a ``BackupPlan``. + name (str): + Required. The resource name of the ``BackupPlan`` to + delete. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.DeleteBackupPlanRequest): + request = backupplan.DeleteBackupPlanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup_plan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.CreateBackupPlanAssociationRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + backup_plan_association: Optional[ + backupplanassociation.BackupPlanAssociation + ] = None, + backup_plan_association_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create a BackupPlanAssociation + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_create_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.create_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest, dict]): + The request object. Request message for creating a backup + plan. + parent (str): + Required. The backup plan association project and + location in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR locations map to GCP regions, for example + **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association (google.cloud.backupdr_v1.types.BackupPlanAssociation): + Required. The resource being created + This corresponds to the ``backup_plan_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association_id (str): + Required. The name of the backup plan + association to create. The name must be + unique for the specified project and + location. 
+ + This corresponds to the ``backup_plan_association_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, backup_plan_association, backup_plan_association_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.CreateBackupPlanAssociationRequest + ): + request = backupplanassociation.CreateBackupPlanAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_plan_association is not None: + request.backup_plan_association = backup_plan_association + if backup_plan_association_id is not None: + request.backup_plan_association_id = backup_plan_association_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.create_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupplanassociation.BackupPlanAssociation, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.GetBackupPlanAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplanassociation.BackupPlanAssociation: + r"""Gets details of a single BackupPlanAssociation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_get_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_plan_association(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest, dict]): + The request object. Request message for getting a + BackupPlanAssociation resource. + name (str): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupPlanAssociation: + A BackupPlanAssociation represents a + single BackupPlanAssociation which + contains details like workload, backup + plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.GetBackupPlanAssociationRequest + ): + request = backupplanassociation.GetBackupPlanAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_backup_plan_associations( + self, + request: Optional[ + Union[backupplanassociation.ListBackupPlanAssociationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupPlanAssociationsPager: + r"""Lists BackupPlanAssociations in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_backup_plan_associations(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_associations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest, dict]): + The request object. Request message for List + BackupPlanAssociation + parent (str): + Required. The project and location for which to retrieve + backup Plan Associations information, in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for example + **us-central1**. To retrieve backup plan associations + for all locations, use "-" for the ``{location}`` value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsPager: + Response message for List + BackupPlanAssociation + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.ListBackupPlanAssociationsRequest + ): + request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_backup_plan_associations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupPlanAssociationsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.DeleteBackupPlanAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single BackupPlanAssociation. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_delete_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest, dict]): + The request object. Request message for deleting a backup + plan association. + name (str): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. 
A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.DeleteBackupPlanAssociationRequest + ): + request = backupplanassociation.DeleteBackupPlanAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def trigger_backup( + self, + request: Optional[ + Union[backupplanassociation.TriggerBackupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + rule_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Triggers a new Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_trigger_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Make the request + operation = client.trigger_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.TriggerBackupRequest, dict]): + The request object. Request message for triggering a + backup. + name (str): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + rule_id (str): + Required. backup rule_id for which a backup needs to be + triggered. + + This corresponds to the ``rule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, rule_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplanassociation.TriggerBackupRequest): + request = backupplanassociation.TriggerBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if rule_id is not None: + request.rule_id = rule_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.trigger_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
class ListBackupVaultsPager:
    """Synchronous pager over ``list_backup_vaults`` results.

    Wraps an initial
    :class:`google.cloud.backupdr_v1.types.ListBackupVaultsResponse` and, as
    ``__iter__`` is consumed, transparently issues further
    ``ListBackupVaults`` requests, yielding every item of each page's
    ``backup_vaults`` field.

    Attribute access not handled by the pager itself is forwarded to the most
    recently received response object.
    """

    def __init__(
        self,
        method: Callable[..., backupvault.ListBackupVaultsResponse],
        request: backupvault.ListBackupVaultsRequest,
        response: backupvault.ListBackupVaultsResponse,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The client method that produced ``response``;
                reused to fetch subsequent pages.
            request (google.cloud.backupdr_v1.types.ListBackupVaultsRequest):
                The initial request object.
            response (google.cloud.backupdr_v1.types.ListBackupVaultsResponse):
                The initial response object.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        self._request = backupvault.ListBackupVaultsRequest(request)
        self._response = response
        self._retry = retry
        self._timeout = timeout
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[backupvault.ListBackupVaultsResponse]:
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(
                self._request,
                retry=self._retry,
                timeout=self._timeout,
                metadata=self._metadata,
            )
            self._response = page
            yield page

    def __iter__(self) -> Iterator[backupvault.BackupVault]:
        for page in self.pages:
            for vault in page.backup_vaults:
                yield vault

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[backupvault.ListBackupVaultsResponse]], + request: backupvault.ListBackupVaultsRequest, + response: backupvault.ListBackupVaultsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupVaultsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupVaultsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupvault.ListBackupVaultsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupvault.ListBackupVaultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupvault.BackupVault]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_vaults: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchUsableBackupVaultsPager: + """A pager for iterating 
through ``fetch_usable_backup_vaults`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_vaults`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``FetchUsableBackupVaults`` requests and continue to iterate + through the ``backup_vaults`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupvault.FetchUsableBackupVaultsResponse], + request: backupvault.FetchUsableBackupVaultsRequest, + response: backupvault.FetchUsableBackupVaultsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupvault.FetchUsableBackupVaultsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupvault.FetchUsableBackupVaultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupvault.BackupVault]: + for page in self.pages: + yield from page.backup_vaults + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchUsableBackupVaultsAsyncPager: + """A pager for iterating through ``fetch_usable_backup_vaults`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_vaults`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``FetchUsableBackupVaults`` requests and continue to iterate + through the ``backup_vaults`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[backupvault.FetchUsableBackupVaultsResponse]], + request: backupvault.FetchUsableBackupVaultsRequest, + response: backupvault.FetchUsableBackupVaultsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupvault.FetchUsableBackupVaultsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupvault.FetchUsableBackupVaultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupvault.BackupVault]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_vaults: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class 
ListDataSourcesPager: + """A pager for iterating through ``list_data_sources`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_sources`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataSources`` requests and continue to iterate + through the ``data_sources`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupvault.ListDataSourcesResponse], + request: backupvault.ListDataSourcesRequest, + response: backupvault.ListDataSourcesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListDataSourcesRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListDataSourcesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupvault.ListDataSourcesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupvault.ListDataSourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupvault.DataSource]: + for page in self.pages: + yield from page.data_sources + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDataSourcesAsyncPager: + """A pager for iterating through ``list_data_sources`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_sources`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataSources`` requests and continue to iterate + through the ``data_sources`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[backupvault.ListDataSourcesResponse]], + request: backupvault.ListDataSourcesRequest, + response: backupvault.ListDataSourcesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListDataSourcesRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListDataSourcesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupvault.ListDataSourcesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupvault.ListDataSourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupvault.DataSource]: + async def async_generator(): + async for page in self.pages: + for response in page.data_sources: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupsPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupvault.ListBackupsResponse], + request: backupvault.ListBackupsRequest, + response: backupvault.ListBackupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupvault.ListBackupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupvault.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupvault.Backup]: + for page in self.pages: + yield from page.backups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupsAsyncPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[backupvault.ListBackupsResponse]], + request: backupvault.ListBackupsRequest, + response: backupvault.ListBackupsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupvault.ListBackupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupvault.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupvault.Backup]: + async def async_generator(): + async for page in self.pages: + for response in page.backups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupPlansPager: + """A pager for iterating through ``list_backup_plans`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_plans`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupPlans`` requests and continue to iterate + through the ``backup_plans`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupplan.ListBackupPlansResponse], + request: backupplan.ListBackupPlansRequest, + response: backupplan.ListBackupPlansResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupPlansRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupPlansResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupplan.ListBackupPlansRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupplan.ListBackupPlansResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupplan.BackupPlan]: + for page in self.pages: + yield from page.backup_plans + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupPlansAsyncPager: + """A pager for iterating through ``list_backup_plans`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_plans`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupPlans`` requests and continue to iterate + through the ``backup_plans`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[backupplan.ListBackupPlansResponse]], + request: backupplan.ListBackupPlansRequest, + response: backupplan.ListBackupPlansResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupPlansRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupPlansResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupplan.ListBackupPlansRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupplan.ListBackupPlansResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupplan.BackupPlan]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_plans: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupPlanAssociationsPager: + """A pager for iterating through ``list_backup_plan_associations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_plan_associations`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupPlanAssociations`` requests and continue to iterate + through the ``backup_plan_associations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupplanassociation.ListBackupPlanAssociationsResponse], + request: backupplanassociation.ListBackupPlanAssociationsRequest, + response: backupplanassociation.ListBackupPlanAssociationsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[backupplanassociation.ListBackupPlanAssociationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupplanassociation.BackupPlanAssociation]: + for page in self.pages: + yield from page.backup_plan_associations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupPlanAssociationsAsyncPager: + """A pager for iterating through ``list_backup_plan_associations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_plan_associations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupPlanAssociations`` requests and continue to iterate + through the ``backup_plan_associations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[backupplanassociation.ListBackupPlanAssociationsResponse] + ], + request: backupplanassociation.ListBackupPlanAssociationsRequest, + response: backupplanassociation.ListBackupPlanAssociationsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[backupplanassociation.ListBackupPlanAssociationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupplanassociation.BackupPlanAssociation]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_plan_associations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py index 0b741d07dfc8..e11f0ea29379 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py @@ -29,7 +29,12 @@ from google.oauth2 import service_account # type: ignore from google.cloud.backupdr_v1 import gapic_version as package_version -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -170,6 +175,202 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, 
client_info=client_info, ), + self.create_backup_vault: gapic_v1.method.wrap_method( + self.create_backup_vault, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_backup_vaults: gapic_v1.method.wrap_method( + self.list_backup_vaults, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_usable_backup_vaults: gapic_v1.method.wrap_method( + self.fetch_usable_backup_vaults, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup_vault: gapic_v1.method.wrap_method( + self.get_backup_vault, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup_vault: gapic_v1.method.wrap_method( + self.update_backup_vault, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup_vault: gapic_v1.method.wrap_method( + self.delete_backup_vault, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_data_sources: gapic_v1.method.wrap_method( + self.list_data_sources, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( 
+ core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_data_source: gapic_v1.method.wrap_method( + self.get_data_source, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_data_source: gapic_v1.method.wrap_method( + self.update_data_source, + default_timeout=60.0, + client_info=client_info, + ), + self.list_backups: gapic_v1.method.wrap_method( + self.list_backups, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup: gapic_v1.method.wrap_method( + self.get_backup, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup: gapic_v1.method.wrap_method( + self.update_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup: gapic_v1.method.wrap_method( + self.delete_backup, + default_timeout=None, + client_info=client_info, + ), + self.restore_backup: gapic_v1.method.wrap_method( + self.restore_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.create_backup_plan: gapic_v1.method.wrap_method( + self.create_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.get_backup_plan: gapic_v1.method.wrap_method( + self.get_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.list_backup_plans: gapic_v1.method.wrap_method( + self.list_backup_plans, + default_timeout=None, + client_info=client_info, + ), + 
self.delete_backup_plan: gapic_v1.method.wrap_method( + self.delete_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.create_backup_plan_association: gapic_v1.method.wrap_method( + self.create_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.get_backup_plan_association: gapic_v1.method.wrap_method( + self.get_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.list_backup_plan_associations: gapic_v1.method.wrap_method( + self.list_backup_plan_associations, + default_timeout=None, + client_info=client_info, + ), + self.delete_backup_plan_association: gapic_v1.method.wrap_method( + self.delete_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.trigger_backup: gapic_v1.method.wrap_method( + self.trigger_backup, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -225,6 +426,233 @@ def delete_management_server( ]: raise NotImplementedError() + @property + def create_backup_vault( + self, + ) -> Callable[ + [backupvault.CreateBackupVaultRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backupvault.ListBackupVaultsRequest], + Union[ + backupvault.ListBackupVaultsResponse, + Awaitable[backupvault.ListBackupVaultsResponse], + ], + ]: + raise NotImplementedError() + + @property + def fetch_usable_backup_vaults( + self, + ) -> Callable[ + [backupvault.FetchUsableBackupVaultsRequest], + Union[ + backupvault.FetchUsableBackupVaultsResponse, + Awaitable[backupvault.FetchUsableBackupVaultsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_backup_vault( + self, + ) -> Callable[ + [backupvault.GetBackupVaultRequest], + Union[backupvault.BackupVault, Awaitable[backupvault.BackupVault]], + ]: + raise NotImplementedError() + + @property + def update_backup_vault( + 
self, + ) -> Callable[ + [backupvault.UpdateBackupVaultRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_backup_vault( + self, + ) -> Callable[ + [backupvault.DeleteBackupVaultRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_data_sources( + self, + ) -> Callable[ + [backupvault.ListDataSourcesRequest], + Union[ + backupvault.ListDataSourcesResponse, + Awaitable[backupvault.ListDataSourcesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_data_source( + self, + ) -> Callable[ + [backupvault.GetDataSourceRequest], + Union[backupvault.DataSource, Awaitable[backupvault.DataSource]], + ]: + raise NotImplementedError() + + @property + def update_data_source( + self, + ) -> Callable[ + [backupvault.UpdateDataSourceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_backups( + self, + ) -> Callable[ + [backupvault.ListBackupsRequest], + Union[ + backupvault.ListBackupsResponse, Awaitable[backupvault.ListBackupsResponse] + ], + ]: + raise NotImplementedError() + + @property + def get_backup( + self, + ) -> Callable[ + [backupvault.GetBackupRequest], + Union[backupvault.Backup, Awaitable[backupvault.Backup]], + ]: + raise NotImplementedError() + + @property + def update_backup( + self, + ) -> Callable[ + [backupvault.UpdateBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_backup( + self, + ) -> Callable[ + [backupvault.DeleteBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def restore_backup( + self, + ) -> Callable[ + [backupvault.RestoreBackupRequest], + Union[operations_pb2.Operation, 
Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_backup_plan( + self, + ) -> Callable[ + [backupplan.CreateBackupPlanRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_backup_plan( + self, + ) -> Callable[ + [backupplan.GetBackupPlanRequest], + Union[backupplan.BackupPlan, Awaitable[backupplan.BackupPlan]], + ]: + raise NotImplementedError() + + @property + def list_backup_plans( + self, + ) -> Callable[ + [backupplan.ListBackupPlansRequest], + Union[ + backupplan.ListBackupPlansResponse, + Awaitable[backupplan.ListBackupPlansResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_backup_plan( + self, + ) -> Callable[ + [backupplan.DeleteBackupPlanRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.CreateBackupPlanAssociationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.GetBackupPlanAssociationRequest], + Union[ + backupplanassociation.BackupPlanAssociation, + Awaitable[backupplanassociation.BackupPlanAssociation], + ], + ]: + raise NotImplementedError() + + @property + def list_backup_plan_associations( + self, + ) -> Callable[ + [backupplanassociation.ListBackupPlanAssociationsRequest], + Union[ + backupplanassociation.ListBackupPlanAssociationsResponse, + Awaitable[backupplanassociation.ListBackupPlanAssociationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.DeleteBackupPlanAssociationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + 
]: + raise NotImplementedError() + + @property + def trigger_backup( + self, + ) -> Callable[ + [backupplanassociation.TriggerBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py index 39a429c884c0..63c287d5bfa1 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py @@ -26,7 +26,12 @@ from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, +) from .base import DEFAULT_CLIENT_INFO, BackupDRTransport @@ -361,6 +366,637 @@ def delete_management_server( ) return self._stubs["delete_management_server"] + @property + def create_backup_vault( + self, + ) -> Callable[[backupvault.CreateBackupVaultRequest], operations_pb2.Operation]: + r"""Return a callable for the create backup vault method over gRPC. + + Creates a new BackupVault in a given project and + location. + + Returns: + Callable[[~.CreateBackupVaultRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_backup_vault" not in self._stubs: + self._stubs["create_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupVault", + request_serializer=backupvault.CreateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_vault"] + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backupvault.ListBackupVaultsRequest], backupvault.ListBackupVaultsResponse + ]: + r"""Return a callable for the list backup vaults method over gRPC. + + Lists BackupVaults in a given project and location. + + Returns: + Callable[[~.ListBackupVaultsRequest], + ~.ListBackupVaultsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_vaults" not in self._stubs: + self._stubs["list_backup_vaults"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupVaults", + request_serializer=backupvault.ListBackupVaultsRequest.serialize, + response_deserializer=backupvault.ListBackupVaultsResponse.deserialize, + ) + return self._stubs["list_backup_vaults"] + + @property + def fetch_usable_backup_vaults( + self, + ) -> Callable[ + [backupvault.FetchUsableBackupVaultsRequest], + backupvault.FetchUsableBackupVaultsResponse, + ]: + r"""Return a callable for the fetch usable backup vaults method over gRPC. + + FetchUsableBackupVaults lists usable BackupVaults in + a given project and location. Usable BackupVault are the + ones that user has backupdr.backupVaults.get permission. + + Returns: + Callable[[~.FetchUsableBackupVaultsRequest], + ~.FetchUsableBackupVaultsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_usable_backup_vaults" not in self._stubs: + self._stubs["fetch_usable_backup_vaults"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/FetchUsableBackupVaults", + request_serializer=backupvault.FetchUsableBackupVaultsRequest.serialize, + response_deserializer=backupvault.FetchUsableBackupVaultsResponse.deserialize, + ) + return self._stubs["fetch_usable_backup_vaults"] + + @property + def get_backup_vault( + self, + ) -> Callable[[backupvault.GetBackupVaultRequest], backupvault.BackupVault]: + r"""Return a callable for the get backup vault method over gRPC. + + Gets details of a BackupVault. + + Returns: + Callable[[~.GetBackupVaultRequest], + ~.BackupVault]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_vault" not in self._stubs: + self._stubs["get_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupVault", + request_serializer=backupvault.GetBackupVaultRequest.serialize, + response_deserializer=backupvault.BackupVault.deserialize, + ) + return self._stubs["get_backup_vault"] + + @property + def update_backup_vault( + self, + ) -> Callable[[backupvault.UpdateBackupVaultRequest], operations_pb2.Operation]: + r"""Return a callable for the update backup vault method over gRPC. + + Updates the settings of a BackupVault. + + Returns: + Callable[[~.UpdateBackupVaultRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup_vault" not in self._stubs: + self._stubs["update_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackupVault", + request_serializer=backupvault.UpdateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup_vault"] + + @property + def delete_backup_vault( + self, + ) -> Callable[[backupvault.DeleteBackupVaultRequest], operations_pb2.Operation]: + r"""Return a callable for the delete backup vault method over gRPC. + + Deletes a BackupVault. + + Returns: + Callable[[~.DeleteBackupVaultRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_vault" not in self._stubs: + self._stubs["delete_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupVault", + request_serializer=backupvault.DeleteBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_vault"] + + @property + def list_data_sources( + self, + ) -> Callable[ + [backupvault.ListDataSourcesRequest], backupvault.ListDataSourcesResponse + ]: + r"""Return a callable for the list data sources method over gRPC. + + Lists DataSources in a given project and location. + + Returns: + Callable[[~.ListDataSourcesRequest], + ~.ListDataSourcesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_data_sources" not in self._stubs: + self._stubs["list_data_sources"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListDataSources", + request_serializer=backupvault.ListDataSourcesRequest.serialize, + response_deserializer=backupvault.ListDataSourcesResponse.deserialize, + ) + return self._stubs["list_data_sources"] + + @property + def get_data_source( + self, + ) -> Callable[[backupvault.GetDataSourceRequest], backupvault.DataSource]: + r"""Return a callable for the get data source method over gRPC. + + Gets details of a DataSource. + + Returns: + Callable[[~.GetDataSourceRequest], + ~.DataSource]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_source" not in self._stubs: + self._stubs["get_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetDataSource", + request_serializer=backupvault.GetDataSourceRequest.serialize, + response_deserializer=backupvault.DataSource.deserialize, + ) + return self._stubs["get_data_source"] + + @property + def update_data_source( + self, + ) -> Callable[[backupvault.UpdateDataSourceRequest], operations_pb2.Operation]: + r"""Return a callable for the update data source method over gRPC. + + Updates the settings of a DataSource. + + Returns: + Callable[[~.UpdateDataSourceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_data_source" not in self._stubs: + self._stubs["update_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateDataSource", + request_serializer=backupvault.UpdateDataSourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_data_source"] + + @property + def list_backups( + self, + ) -> Callable[[backupvault.ListBackupsRequest], backupvault.ListBackupsResponse]: + r"""Return a callable for the list backups method over gRPC. + + Lists Backups in a given project and location. + + Returns: + Callable[[~.ListBackupsRequest], + ~.ListBackupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backups" not in self._stubs: + self._stubs["list_backups"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackups", + request_serializer=backupvault.ListBackupsRequest.serialize, + response_deserializer=backupvault.ListBackupsResponse.deserialize, + ) + return self._stubs["list_backups"] + + @property + def get_backup( + self, + ) -> Callable[[backupvault.GetBackupRequest], backupvault.Backup]: + r"""Return a callable for the get backup method over gRPC. + + Gets details of a Backup. + + Returns: + Callable[[~.GetBackupRequest], + ~.Backup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_backup" not in self._stubs: + self._stubs["get_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackup", + request_serializer=backupvault.GetBackupRequest.serialize, + response_deserializer=backupvault.Backup.deserialize, + ) + return self._stubs["get_backup"] + + @property + def update_backup( + self, + ) -> Callable[[backupvault.UpdateBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the update backup method over gRPC. + + Updates the settings of a Backup. + + Returns: + Callable[[~.UpdateBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup" not in self._stubs: + self._stubs["update_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackup", + request_serializer=backupvault.UpdateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup"] + + @property + def delete_backup( + self, + ) -> Callable[[backupvault.DeleteBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a Backup. + + Returns: + Callable[[~.DeleteBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_backup" not in self._stubs: + self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackup", + request_serializer=backupvault.DeleteBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup"] + + @property + def restore_backup( + self, + ) -> Callable[[backupvault.RestoreBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the restore backup method over gRPC. + + Restore from a Backup + + Returns: + Callable[[~.RestoreBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restore_backup" not in self._stubs: + self._stubs["restore_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/RestoreBackup", + request_serializer=backupvault.RestoreBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_backup"] + + @property + def create_backup_plan( + self, + ) -> Callable[[backupplan.CreateBackupPlanRequest], operations_pb2.Operation]: + r"""Return a callable for the create backup plan method over gRPC. + + Create a BackupPlan + + Returns: + Callable[[~.CreateBackupPlanRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_backup_plan" not in self._stubs: + self._stubs["create_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupPlan", + request_serializer=backupplan.CreateBackupPlanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_plan"] + + @property + def get_backup_plan( + self, + ) -> Callable[[backupplan.GetBackupPlanRequest], backupplan.BackupPlan]: + r"""Return a callable for the get backup plan method over gRPC. + + Gets details of a single BackupPlan. + + Returns: + Callable[[~.GetBackupPlanRequest], + ~.BackupPlan]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_plan" not in self._stubs: + self._stubs["get_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupPlan", + request_serializer=backupplan.GetBackupPlanRequest.serialize, + response_deserializer=backupplan.BackupPlan.deserialize, + ) + return self._stubs["get_backup_plan"] + + @property + def list_backup_plans( + self, + ) -> Callable[ + [backupplan.ListBackupPlansRequest], backupplan.ListBackupPlansResponse + ]: + r"""Return a callable for the list backup plans method over gRPC. + + Lists BackupPlans in a given project and location. + + Returns: + Callable[[~.ListBackupPlansRequest], + ~.ListBackupPlansResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_backup_plans" not in self._stubs: + self._stubs["list_backup_plans"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupPlans", + request_serializer=backupplan.ListBackupPlansRequest.serialize, + response_deserializer=backupplan.ListBackupPlansResponse.deserialize, + ) + return self._stubs["list_backup_plans"] + + @property + def delete_backup_plan( + self, + ) -> Callable[[backupplan.DeleteBackupPlanRequest], operations_pb2.Operation]: + r"""Return a callable for the delete backup plan method over gRPC. + + Deletes a single BackupPlan. + + Returns: + Callable[[~.DeleteBackupPlanRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_plan" not in self._stubs: + self._stubs["delete_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlan", + request_serializer=backupplan.DeleteBackupPlanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_plan"] + + @property + def create_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.CreateBackupPlanAssociationRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the create backup plan association method over gRPC. + + Create a BackupPlanAssociation + + Returns: + Callable[[~.CreateBackupPlanAssociationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_backup_plan_association" not in self._stubs: + self._stubs[ + "create_backup_plan_association" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupPlanAssociation", + request_serializer=backupplanassociation.CreateBackupPlanAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_plan_association"] + + @property + def get_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.GetBackupPlanAssociationRequest], + backupplanassociation.BackupPlanAssociation, + ]: + r"""Return a callable for the get backup plan association method over gRPC. + + Gets details of a single BackupPlanAssociation. + + Returns: + Callable[[~.GetBackupPlanAssociationRequest], + ~.BackupPlanAssociation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_plan_association" not in self._stubs: + self._stubs["get_backup_plan_association"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupPlanAssociation", + request_serializer=backupplanassociation.GetBackupPlanAssociationRequest.serialize, + response_deserializer=backupplanassociation.BackupPlanAssociation.deserialize, + ) + return self._stubs["get_backup_plan_association"] + + @property + def list_backup_plan_associations( + self, + ) -> Callable[ + [backupplanassociation.ListBackupPlanAssociationsRequest], + backupplanassociation.ListBackupPlanAssociationsResponse, + ]: + r"""Return a callable for the list backup plan associations method over gRPC. + + Lists BackupPlanAssociations in a given project and + location. 
+ + Returns: + Callable[[~.ListBackupPlanAssociationsRequest], + ~.ListBackupPlanAssociationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_plan_associations" not in self._stubs: + self._stubs[ + "list_backup_plan_associations" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupPlanAssociations", + request_serializer=backupplanassociation.ListBackupPlanAssociationsRequest.serialize, + response_deserializer=backupplanassociation.ListBackupPlanAssociationsResponse.deserialize, + ) + return self._stubs["list_backup_plan_associations"] + + @property + def delete_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.DeleteBackupPlanAssociationRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the delete backup plan association method over gRPC. + + Deletes a single BackupPlanAssociation. + + Returns: + Callable[[~.DeleteBackupPlanAssociationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_backup_plan_association" not in self._stubs: + self._stubs[ + "delete_backup_plan_association" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlanAssociation", + request_serializer=backupplanassociation.DeleteBackupPlanAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_plan_association"] + + @property + def trigger_backup( + self, + ) -> Callable[ + [backupplanassociation.TriggerBackupRequest], operations_pb2.Operation + ]: + r"""Return a callable for the trigger backup method over gRPC. + + Triggers a new Backup. + + Returns: + Callable[[~.TriggerBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "trigger_backup" not in self._stubs: + self._stubs["trigger_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/TriggerBackup", + request_serializer=backupplanassociation.TriggerBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["trigger_backup"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py index 26b64ba6a60c..9acd2b61c3fb 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py @@ -28,7 +28,12 @@ import grpc # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.backupdr_v1.types import 
backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, +) from .base import DEFAULT_CLIENT_INFO, BackupDRTransport from .grpc import BackupDRGrpcTransport @@ -374,6 +379,665 @@ def delete_management_server( ) return self._stubs["delete_management_server"] + @property + def create_backup_vault( + self, + ) -> Callable[ + [backupvault.CreateBackupVaultRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create backup vault method over gRPC. + + Creates a new BackupVault in a given project and + location. + + Returns: + Callable[[~.CreateBackupVaultRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup_vault" not in self._stubs: + self._stubs["create_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupVault", + request_serializer=backupvault.CreateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_vault"] + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backupvault.ListBackupVaultsRequest], + Awaitable[backupvault.ListBackupVaultsResponse], + ]: + r"""Return a callable for the list backup vaults method over gRPC. + + Lists BackupVaults in a given project and location. + + Returns: + Callable[[~.ListBackupVaultsRequest], + Awaitable[~.ListBackupVaultsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_backup_vaults" not in self._stubs: + self._stubs["list_backup_vaults"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupVaults", + request_serializer=backupvault.ListBackupVaultsRequest.serialize, + response_deserializer=backupvault.ListBackupVaultsResponse.deserialize, + ) + return self._stubs["list_backup_vaults"] + + @property + def fetch_usable_backup_vaults( + self, + ) -> Callable[ + [backupvault.FetchUsableBackupVaultsRequest], + Awaitable[backupvault.FetchUsableBackupVaultsResponse], + ]: + r"""Return a callable for the fetch usable backup vaults method over gRPC. + + FetchUsableBackupVaults lists usable BackupVaults in + a given project and location. Usable BackupVault are the + ones that user has backupdr.backupVaults.get permission. + + Returns: + Callable[[~.FetchUsableBackupVaultsRequest], + Awaitable[~.FetchUsableBackupVaultsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_usable_backup_vaults" not in self._stubs: + self._stubs["fetch_usable_backup_vaults"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/FetchUsableBackupVaults", + request_serializer=backupvault.FetchUsableBackupVaultsRequest.serialize, + response_deserializer=backupvault.FetchUsableBackupVaultsResponse.deserialize, + ) + return self._stubs["fetch_usable_backup_vaults"] + + @property + def get_backup_vault( + self, + ) -> Callable[ + [backupvault.GetBackupVaultRequest], Awaitable[backupvault.BackupVault] + ]: + r"""Return a callable for the get backup vault method over gRPC. + + Gets details of a BackupVault. + + Returns: + Callable[[~.GetBackupVaultRequest], + Awaitable[~.BackupVault]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_vault" not in self._stubs: + self._stubs["get_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupVault", + request_serializer=backupvault.GetBackupVaultRequest.serialize, + response_deserializer=backupvault.BackupVault.deserialize, + ) + return self._stubs["get_backup_vault"] + + @property + def update_backup_vault( + self, + ) -> Callable[ + [backupvault.UpdateBackupVaultRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update backup vault method over gRPC. + + Updates the settings of a BackupVault. + + Returns: + Callable[[~.UpdateBackupVaultRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup_vault" not in self._stubs: + self._stubs["update_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackupVault", + request_serializer=backupvault.UpdateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup_vault"] + + @property + def delete_backup_vault( + self, + ) -> Callable[ + [backupvault.DeleteBackupVaultRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete backup vault method over gRPC. + + Deletes a BackupVault. + + Returns: + Callable[[~.DeleteBackupVaultRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_vault" not in self._stubs: + self._stubs["delete_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupVault", + request_serializer=backupvault.DeleteBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_vault"] + + @property + def list_data_sources( + self, + ) -> Callable[ + [backupvault.ListDataSourcesRequest], + Awaitable[backupvault.ListDataSourcesResponse], + ]: + r"""Return a callable for the list data sources method over gRPC. + + Lists DataSources in a given project and location. + + Returns: + Callable[[~.ListDataSourcesRequest], + Awaitable[~.ListDataSourcesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_data_sources" not in self._stubs: + self._stubs["list_data_sources"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListDataSources", + request_serializer=backupvault.ListDataSourcesRequest.serialize, + response_deserializer=backupvault.ListDataSourcesResponse.deserialize, + ) + return self._stubs["list_data_sources"] + + @property + def get_data_source( + self, + ) -> Callable[ + [backupvault.GetDataSourceRequest], Awaitable[backupvault.DataSource] + ]: + r"""Return a callable for the get data source method over gRPC. + + Gets details of a DataSource. + + Returns: + Callable[[~.GetDataSourceRequest], + Awaitable[~.DataSource]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_source" not in self._stubs: + self._stubs["get_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetDataSource", + request_serializer=backupvault.GetDataSourceRequest.serialize, + response_deserializer=backupvault.DataSource.deserialize, + ) + return self._stubs["get_data_source"] + + @property + def update_data_source( + self, + ) -> Callable[ + [backupvault.UpdateDataSourceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update data source method over gRPC. + + Updates the settings of a DataSource. + + Returns: + Callable[[~.UpdateDataSourceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_data_source" not in self._stubs: + self._stubs["update_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateDataSource", + request_serializer=backupvault.UpdateDataSourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_data_source"] + + @property + def list_backups( + self, + ) -> Callable[ + [backupvault.ListBackupsRequest], Awaitable[backupvault.ListBackupsResponse] + ]: + r"""Return a callable for the list backups method over gRPC. + + Lists Backups in a given project and location. + + Returns: + Callable[[~.ListBackupsRequest], + Awaitable[~.ListBackupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backups" not in self._stubs: + self._stubs["list_backups"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackups", + request_serializer=backupvault.ListBackupsRequest.serialize, + response_deserializer=backupvault.ListBackupsResponse.deserialize, + ) + return self._stubs["list_backups"] + + @property + def get_backup( + self, + ) -> Callable[[backupvault.GetBackupRequest], Awaitable[backupvault.Backup]]: + r"""Return a callable for the get backup method over gRPC. + + Gets details of a Backup. + + Returns: + Callable[[~.GetBackupRequest], + Awaitable[~.Backup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup" not in self._stubs: + self._stubs["get_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackup", + request_serializer=backupvault.GetBackupRequest.serialize, + response_deserializer=backupvault.Backup.deserialize, + ) + return self._stubs["get_backup"] + + @property + def update_backup( + self, + ) -> Callable[ + [backupvault.UpdateBackupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update backup method over gRPC. + + Updates the settings of a Backup. + + Returns: + Callable[[~.UpdateBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_backup" not in self._stubs: + self._stubs["update_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackup", + request_serializer=backupvault.UpdateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup"] + + @property + def delete_backup( + self, + ) -> Callable[ + [backupvault.DeleteBackupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a Backup. + + Returns: + Callable[[~.DeleteBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup" not in self._stubs: + self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackup", + request_serializer=backupvault.DeleteBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup"] + + @property + def restore_backup( + self, + ) -> Callable[ + [backupvault.RestoreBackupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the restore backup method over gRPC. + + Restore from a Backup + + Returns: + Callable[[~.RestoreBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "restore_backup" not in self._stubs: + self._stubs["restore_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/RestoreBackup", + request_serializer=backupvault.RestoreBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_backup"] + + @property + def create_backup_plan( + self, + ) -> Callable[ + [backupplan.CreateBackupPlanRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create backup plan method over gRPC. + + Create a BackupPlan + + Returns: + Callable[[~.CreateBackupPlanRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup_plan" not in self._stubs: + self._stubs["create_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupPlan", + request_serializer=backupplan.CreateBackupPlanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_plan"] + + @property + def get_backup_plan( + self, + ) -> Callable[[backupplan.GetBackupPlanRequest], Awaitable[backupplan.BackupPlan]]: + r"""Return a callable for the get backup plan method over gRPC. + + Gets details of a single BackupPlan. + + Returns: + Callable[[~.GetBackupPlanRequest], + Awaitable[~.BackupPlan]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_backup_plan" not in self._stubs: + self._stubs["get_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupPlan", + request_serializer=backupplan.GetBackupPlanRequest.serialize, + response_deserializer=backupplan.BackupPlan.deserialize, + ) + return self._stubs["get_backup_plan"] + + @property + def list_backup_plans( + self, + ) -> Callable[ + [backupplan.ListBackupPlansRequest], + Awaitable[backupplan.ListBackupPlansResponse], + ]: + r"""Return a callable for the list backup plans method over gRPC. + + Lists BackupPlans in a given project and location. + + Returns: + Callable[[~.ListBackupPlansRequest], + Awaitable[~.ListBackupPlansResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_plans" not in self._stubs: + self._stubs["list_backup_plans"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupPlans", + request_serializer=backupplan.ListBackupPlansRequest.serialize, + response_deserializer=backupplan.ListBackupPlansResponse.deserialize, + ) + return self._stubs["list_backup_plans"] + + @property + def delete_backup_plan( + self, + ) -> Callable[ + [backupplan.DeleteBackupPlanRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete backup plan method over gRPC. + + Deletes a single BackupPlan. + + Returns: + Callable[[~.DeleteBackupPlanRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_backup_plan" not in self._stubs: + self._stubs["delete_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlan", + request_serializer=backupplan.DeleteBackupPlanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_plan"] + + @property + def create_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.CreateBackupPlanAssociationRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create backup plan association method over gRPC. + + Create a BackupPlanAssociation + + Returns: + Callable[[~.CreateBackupPlanAssociationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup_plan_association" not in self._stubs: + self._stubs[ + "create_backup_plan_association" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupPlanAssociation", + request_serializer=backupplanassociation.CreateBackupPlanAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_plan_association"] + + @property + def get_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.GetBackupPlanAssociationRequest], + Awaitable[backupplanassociation.BackupPlanAssociation], + ]: + r"""Return a callable for the get backup plan association method over gRPC. + + Gets details of a single BackupPlanAssociation. + + Returns: + Callable[[~.GetBackupPlanAssociationRequest], + Awaitable[~.BackupPlanAssociation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_plan_association" not in self._stubs: + self._stubs["get_backup_plan_association"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupPlanAssociation", + request_serializer=backupplanassociation.GetBackupPlanAssociationRequest.serialize, + response_deserializer=backupplanassociation.BackupPlanAssociation.deserialize, + ) + return self._stubs["get_backup_plan_association"] + + @property + def list_backup_plan_associations( + self, + ) -> Callable[ + [backupplanassociation.ListBackupPlanAssociationsRequest], + Awaitable[backupplanassociation.ListBackupPlanAssociationsResponse], + ]: + r"""Return a callable for the list backup plan associations method over gRPC. + + Lists BackupPlanAssociations in a given project and + location. + + Returns: + Callable[[~.ListBackupPlanAssociationsRequest], + Awaitable[~.ListBackupPlanAssociationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_backup_plan_associations" not in self._stubs: + self._stubs[ + "list_backup_plan_associations" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupPlanAssociations", + request_serializer=backupplanassociation.ListBackupPlanAssociationsRequest.serialize, + response_deserializer=backupplanassociation.ListBackupPlanAssociationsResponse.deserialize, + ) + return self._stubs["list_backup_plan_associations"] + + @property + def delete_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.DeleteBackupPlanAssociationRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete backup plan association method over gRPC. + + Deletes a single BackupPlanAssociation. + + Returns: + Callable[[~.DeleteBackupPlanAssociationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_plan_association" not in self._stubs: + self._stubs[ + "delete_backup_plan_association" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlanAssociation", + request_serializer=backupplanassociation.DeleteBackupPlanAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_plan_association"] + + @property + def trigger_backup( + self, + ) -> Callable[ + [backupplanassociation.TriggerBackupRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the trigger backup method over gRPC. + + Triggers a new Backup. + + Returns: + Callable[[~.TriggerBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "trigger_backup" not in self._stubs: + self._stubs["trigger_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/TriggerBackup", + request_serializer=backupplanassociation.TriggerBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["trigger_backup"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -415,6 +1079,202 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_backup_vault: gapic_v1.method_async.wrap_method( + self.create_backup_vault, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_backup_vaults: gapic_v1.method_async.wrap_method( + self.list_backup_vaults, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_usable_backup_vaults: gapic_v1.method_async.wrap_method( + self.fetch_usable_backup_vaults, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup_vault: gapic_v1.method_async.wrap_method( + self.get_backup_vault, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + 
multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup_vault: gapic_v1.method_async.wrap_method( + self.update_backup_vault, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup_vault: gapic_v1.method_async.wrap_method( + self.delete_backup_vault, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_data_sources: gapic_v1.method_async.wrap_method( + self.list_data_sources, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_data_source: gapic_v1.method_async.wrap_method( + self.get_data_source, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_data_source: gapic_v1.method_async.wrap_method( + self.update_data_source, + default_timeout=60.0, + client_info=client_info, + ), + self.list_backups: gapic_v1.method_async.wrap_method( + self.list_backups, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup: gapic_v1.method_async.wrap_method( + self.get_backup, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + 
core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup: gapic_v1.method_async.wrap_method( + self.update_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup: gapic_v1.method_async.wrap_method( + self.delete_backup, + default_timeout=None, + client_info=client_info, + ), + self.restore_backup: gapic_v1.method_async.wrap_method( + self.restore_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.create_backup_plan: gapic_v1.method_async.wrap_method( + self.create_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.get_backup_plan: gapic_v1.method_async.wrap_method( + self.get_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.list_backup_plans: gapic_v1.method_async.wrap_method( + self.list_backup_plans, + default_timeout=None, + client_info=client_info, + ), + self.delete_backup_plan: gapic_v1.method_async.wrap_method( + self.delete_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.create_backup_plan_association: gapic_v1.method_async.wrap_method( + self.create_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.get_backup_plan_association: gapic_v1.method_async.wrap_method( + self.get_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.list_backup_plan_associations: gapic_v1.method_async.wrap_method( + self.list_backup_plan_associations, + default_timeout=None, + client_info=client_info, + ), + self.delete_backup_plan_association: gapic_v1.method_async.wrap_method( + self.delete_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.trigger_backup: gapic_v1.method_async.wrap_method( + self.trigger_backup, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git 
a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py index 621153ce0574..2ff0d1ccf3b9 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py @@ -47,7 +47,12 @@ from google.longrunning import operations_pb2 # type: ignore -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, +) from .base import BackupDRTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -74,6 +79,30 @@ class BackupDRRestInterceptor: .. code-block:: python class MyCustomBackupDRInterceptor(BackupDRRestInterceptor): + def pre_create_backup_plan(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup_plan(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_backup_plan_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup_plan_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_management_server(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -82,6 +111,38 @@ def post_create_management_server(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_backup(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup_plan(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup_plan(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup_plan_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup_plan_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_management_server(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -90,6 +151,54 @@ def post_delete_management_server(self, response): logging.log(f"Received response: {response}") return response + def pre_fetch_usable_backup_vaults(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_usable_backup_vaults(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup_plan(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup_plan(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup_plan_association(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup_plan_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_data_source(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_data_source(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_management_server(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -98,6 +207,46 @@ def post_get_management_server(self, response): logging.log(f"Received response: {response}") return response + def pre_list_backup_plan_associations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_plan_associations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backup_plans(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_plans(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backup_vaults(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_vaults(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_data_sources(self, request, metadata): + logging.log(f"Received 
request: {request}") + return request, metadata + + def post_list_data_sources(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_management_servers(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -106,28 +255,68 @@ def post_list_management_servers(self, response): logging.log(f"Received response: {response}") return response + def pre_restore_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restore_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_trigger_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_trigger_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_data_source(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_data_source(self, response): + logging.log(f"Received response: {response}") + return response + transport = BackupDRRestTransport(interceptor=MyCustomBackupDRInterceptor()) client = BackupDRClient(transport=transport) """ - def pre_create_management_server( + def pre_create_backup_plan( self, - request: backupdr.CreateManagementServerRequest, + request: backupplan.CreateBackupPlanRequest, metadata: Sequence[Tuple[str, str]], - ) -> 
Tuple[backupdr.CreateManagementServerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_management_server + ) -> Tuple[backupplan.CreateBackupPlanRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_backup_plan Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_create_management_server( + def post_create_backup_plan( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for create_management_server + """Post-rpc interceptor for create_backup_plan Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -135,22 +324,25 @@ def post_create_management_server( """ return response - def pre_delete_management_server( + def pre_create_backup_plan_association( self, - request: backupdr.DeleteManagementServerRequest, + request: backupplanassociation.CreateBackupPlanAssociationRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[backupdr.DeleteManagementServerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_management_server + ) -> Tuple[ + backupplanassociation.CreateBackupPlanAssociationRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for create_backup_plan_association Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_delete_management_server( + def post_create_backup_plan_association( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_management_server + """Post-rpc interceptor for create_backup_plan_association Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -158,22 +350,22 @@ def post_delete_management_server( """ return response - def pre_get_management_server( + def pre_create_backup_vault( self, - request: backupdr.GetManagementServerRequest, + request: backupvault.CreateBackupVaultRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[backupdr.GetManagementServerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_management_server + ) -> Tuple[backupvault.CreateBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_backup_vault Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_get_management_server( - self, response: backupdr.ManagementServer - ) -> backupdr.ManagementServer: - """Post-rpc interceptor for get_management_server + def post_create_backup_vault( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_backup_vault Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -181,22 +373,22 @@ def post_get_management_server( """ return response - def pre_list_management_servers( + def pre_create_management_server( self, - request: backupdr.ListManagementServersRequest, + request: backupdr.CreateManagementServerRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[backupdr.ListManagementServersRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_management_servers + ) -> Tuple[backupdr.CreateManagementServerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_management_server Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_list_management_servers( - self, response: backupdr.ListManagementServersResponse - ) -> backupdr.ListManagementServersResponse: - """Post-rpc interceptor for list_management_servers + def post_create_management_server( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_management_server Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -204,22 +396,22 @@ def post_list_management_servers( """ return response - def pre_get_location( + def pre_delete_backup( self, - request: locations_pb2.GetLocationRequest, + request: backupvault.DeleteBackupRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_location + ) -> Tuple[backupvault.DeleteBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location + def post_delete_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -227,22 +419,22 @@ def post_get_location( """ return response - def pre_list_locations( + def pre_delete_backup_plan( self, - request: locations_pb2.ListLocationsRequest, + request: backupplan.DeleteBackupPlanRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_locations + ) -> Tuple[backupplan.DeleteBackupPlanRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup_plan Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations + def post_delete_backup_plan( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup_plan Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -250,20 +442,25 @@ def post_list_locations( """ return response - def pre_get_iam_policy( + def pre_delete_backup_plan_association( self, - request: iam_policy_pb2.GetIamPolicyRequest, + request: backupplanassociation.DeleteBackupPlanAssociationRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_iam_policy + ) -> Tuple[ + backupplanassociation.DeleteBackupPlanAssociationRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for delete_backup_plan_association Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy + def post_delete_backup_plan_association( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup_plan_association Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -271,20 +468,22 @@ def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """ return response - def pre_set_iam_policy( + def pre_delete_backup_vault( self, - request: iam_policy_pb2.SetIamPolicyRequest, + request: backupvault.DeleteBackupVaultRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for set_iam_policy + ) -> Tuple[backupvault.DeleteBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup_vault Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy + def post_delete_backup_vault( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup_vault Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -292,22 +491,22 @@ def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """ return response - def pre_test_iam_permissions( + def pre_delete_management_server( self, - request: iam_policy_pb2.TestIamPermissionsRequest, + request: backupdr.DeleteManagementServerRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for test_iam_permissions + ) -> Tuple[backupdr.DeleteManagementServerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_management_server Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions + def post_delete_management_server( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_management_server Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -315,20 +514,22 @@ def post_test_iam_permissions( """ return response - def pre_cancel_operation( + def pre_fetch_usable_backup_vaults( self, - request: operations_pb2.CancelOperationRequest, + request: backupvault.FetchUsableBackupVaultsRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_operation + ) -> Tuple[backupvault.FetchUsableBackupVaultsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_usable_backup_vaults Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_cancel_operation(self, response: None) -> None: - """Post-rpc interceptor for cancel_operation + def post_fetch_usable_backup_vaults( + self, response: backupvault.FetchUsableBackupVaultsResponse + ) -> backupvault.FetchUsableBackupVaultsResponse: + """Post-rpc interceptor for fetch_usable_backup_vaults Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -336,20 +537,18 @@ def post_cancel_operation(self, response: None) -> None: """ return response - def pre_delete_operation( - self, - request: operations_pb2.DeleteOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_operation + def pre_get_backup( + self, request: backupvault.GetBackupRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[backupvault.GetBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_delete_operation(self, response: None) -> None: - """Post-rpc interceptor for delete_operation + def post_get_backup(self, response: backupvault.Backup) -> backupvault.Backup: + """Post-rpc interceptor for get_backup Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -357,22 +556,22 @@ def post_delete_operation(self, response: None) -> None: """ return response - def pre_get_operation( + def pre_get_backup_plan( self, - request: operations_pb2.GetOperationRequest, + request: backupplan.GetBackupPlanRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation + ) -> Tuple[backupplan.GetBackupPlanRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup_plan Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation + def post_get_backup_plan( + self, response: backupplan.BackupPlan + ) -> backupplan.BackupPlan: + """Post-rpc interceptor for get_backup_plan Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -380,22 +579,24 @@ def post_get_operation( """ return response - def pre_list_operations( + def pre_get_backup_plan_association( self, - request: operations_pb2.ListOperationsRequest, + request: backupplanassociation.GetBackupPlanAssociationRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_operations + ) -> Tuple[ + backupplanassociation.GetBackupPlanAssociationRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_backup_plan_association Override 
in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations + def post_get_backup_plan_association( + self, response: backupplanassociation.BackupPlanAssociation + ) -> backupplanassociation.BackupPlanAssociation: + """Post-rpc interceptor for get_backup_plan_association Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -403,164 +604,2784 @@ def post_list_operations( """ return response + def pre_get_backup_vault( + self, + request: backupvault.GetBackupVaultRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.GetBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup_vault -@dataclasses.dataclass -class BackupDRRestStub: - _session: AuthorizedSession - _host: str - _interceptor: BackupDRRestInterceptor + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + def post_get_backup_vault( + self, response: backupvault.BackupVault + ) -> backupvault.BackupVault: + """Post-rpc interceptor for get_backup_vault -class BackupDRRestTransport(BackupDRTransport): - """REST backend transport for BackupDR. + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response - The BackupDR Service + def pre_get_data_source( + self, + request: backupvault.GetDataSourceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.GetDataSourceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_data_source - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata - It sends JSON representations of protocol buffers over HTTP/1.1 + def post_get_data_source( + self, response: backupvault.DataSource + ) -> backupvault.DataSource: + """Post-rpc interceptor for get_data_source - """ + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response - def __init__( + def pre_get_management_server( self, - *, - host: str = "backupdr.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[BackupDRRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. + request: backupdr.GetManagementServerRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupdr.GetManagementServerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_management_server - Args: - host (Optional[str]): - The hostname to connect to (default: 'backupdr.googleapis.com'). 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. + def post_get_management_server( + self, response: backupdr.ManagementServer + ) -> backupdr.ManagementServer: + """Post-rpc interceptor for get_management_server + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER + return response - url_match_items = maybe_url_match.groupdict() + def pre_list_backup_plan_associations( + self, + request: backupplanassociation.ListBackupPlanAssociationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + backupplanassociation.ListBackupPlanAssociationsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_backup_plan_associations - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or BackupDRRestInterceptor() - self._prep_wrapped_messages(client_info) + def post_list_backup_plan_associations( + self, response: backupplanassociation.ListBackupPlanAssociationsResponse + ) -> backupplanassociation.ListBackupPlanAssociationsResponse: + """Post-rpc interceptor for list_backup_plan_associations - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response - This property caches on the instance; repeated calls return the same - client. + def pre_list_backup_plans( + self, + request: backupplan.ListBackupPlansRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupplan.ListBackupPlansRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backup_plans + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - "google.longrunning.Operations.CancelOperation": [ - { - "method": "post", - "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", - "body": "*", - }, - ], - "google.longrunning.Operations.DeleteOperation": [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/operations/*}", - }, - ], - "google.longrunning.Operations.GetOperation": [ - { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/operations/*}", - }, - ], - "google.longrunning.Operations.ListOperations": [ - { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*}/operations", - }, - ], - } + return request, metadata - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1", - ) + def post_list_backup_plans( + self, response: backupplan.ListBackupPlansResponse + ) -> backupplan.ListBackupPlansResponse: + """Post-rpc interceptor for list_backup_plans - self._operations_client = operations_v1.AbstractOperationsClient( - transport=rest_transport - ) + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response + + def pre_list_backups( + self, + request: backupvault.ListBackupsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.ListBackupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backups + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_list_backups( + self, response: backupvault.ListBackupsResponse + ) -> backupvault.ListBackupsResponse: + """Post-rpc interceptor for list_backups + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_list_backup_vaults( + self, + request: backupvault.ListBackupVaultsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.ListBackupVaultsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backup_vaults + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_list_backup_vaults( + self, response: backupvault.ListBackupVaultsResponse + ) -> backupvault.ListBackupVaultsResponse: + """Post-rpc interceptor for list_backup_vaults + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_list_data_sources( + self, + request: backupvault.ListDataSourcesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.ListDataSourcesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_data_sources + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. 
+ """ + return request, metadata + + def post_list_data_sources( + self, response: backupvault.ListDataSourcesResponse + ) -> backupvault.ListDataSourcesResponse: + """Post-rpc interceptor for list_data_sources + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_list_management_servers( + self, + request: backupdr.ListManagementServersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupdr.ListManagementServersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_management_servers + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_list_management_servers( + self, response: backupdr.ListManagementServersResponse + ) -> backupdr.ListManagementServersResponse: + """Post-rpc interceptor for list_management_servers + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_restore_backup( + self, + request: backupvault.RestoreBackupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.RestoreBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for restore_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_restore_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for restore_backup + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response + + def pre_trigger_backup( + self, + request: backupplanassociation.TriggerBackupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupplanassociation.TriggerBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for trigger_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_trigger_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for trigger_backup + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_update_backup( + self, + request: backupvault.UpdateBackupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.UpdateBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_update_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_backup + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_update_backup_vault( + self, + request: backupvault.UpdateBackupVaultRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.UpdateBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_backup_vault + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. 
+ """ + return request, metadata + + def post_update_backup_vault( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_backup_vault + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_update_data_source( + self, + request: backupvault.UpdateDataSourceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.UpdateDataSourceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_data_source + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_update_data_source( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_data_source + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. 
+ """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class BackupDRRestStub: + _session: AuthorizedSession + _host: str + _interceptor: BackupDRRestInterceptor + + +class BackupDRRestTransport(BackupDRTransport): + """REST backend transport for BackupDR. + + The BackupDR Service + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "backupdr.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[BackupDRRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'backupdr.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or BackupDRRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateBackupPlan(BackupDRRestStub): + def __hash__(self): + return hash("CreateBackupPlan") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backupPlanId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplan.CreateBackupPlanRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create backup plan method over HTTP. + + Args: + request (~.backupplan.CreateBackupPlanRequest): + The request object. The request message for creating a ``BackupPlan``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/backupPlans", + "body": "backup_plan", + }, + ] + request, metadata = self._interceptor.pre_create_backup_plan( + request, metadata + ) + pb_request = backupplan.CreateBackupPlanRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_backup_plan(resp) + return resp + + class _CreateBackupPlanAssociation(BackupDRRestStub): + def __hash__(self): + return hash("CreateBackupPlanAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backupPlanAssociationId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplanassociation.CreateBackupPlanAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create backup plan + association method over HTTP. + + Args: + request (~.backupplanassociation.CreateBackupPlanAssociationRequest): + The request object. Request message for creating a backup + plan. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/backupPlanAssociations", + "body": "backup_plan_association", + }, + ] + request, metadata = self._interceptor.pre_create_backup_plan_association( + request, metadata + ) + pb_request = backupplanassociation.CreateBackupPlanAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_backup_plan_association(resp) + return resp + + class _CreateBackupVault(BackupDRRestStub): + def __hash__(self): + return hash("CreateBackupVault") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backupVaultId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.CreateBackupVaultRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create backup vault method over HTTP. + + Args: + request (~.backupvault.CreateBackupVaultRequest): + The request object. Message for creating a BackupVault. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/backupVaults", + "body": "backup_vault", + }, + ] + request, metadata = self._interceptor.pre_create_backup_vault( + request, metadata + ) + pb_request = backupvault.CreateBackupVaultRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_backup_vault(resp) + return resp + + class _CreateManagementServer(BackupDRRestStub): + def __hash__(self): + return hash("CreateManagementServer") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "managementServerId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupdr.CreateManagementServerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create management server method over HTTP. + + Args: + request (~.backupdr.CreateManagementServerRequest): + The request object. Request message for creating a + management server instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/managementServers", + "body": "management_server", + }, + ] + request, metadata = self._interceptor.pre_create_management_server( + request, metadata + ) + pb_request = backupdr.CreateManagementServerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_management_server(resp) + return resp + + class _DeleteBackup(BackupDRRestStub): + def __hash__(self): + return hash("DeleteBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.DeleteBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup method over HTTP. + + Args: + request (~.backupvault.DeleteBackupRequest): + The request object. Message for deleting a Backup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup(request, metadata) + pb_request = backupvault.DeleteBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_backup(resp) + return resp + + class _DeleteBackupPlan(BackupDRRestStub): + def __hash__(self): + return hash("DeleteBackupPlan") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplan.DeleteBackupPlanRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup plan method over HTTP. + + Args: + request (~.backupplan.DeleteBackupPlanRequest): + The request object. The request message for deleting a ``BackupPlan``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backupPlans/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup_plan( + request, metadata + ) + pb_request = backupplan.DeleteBackupPlanRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_backup_plan(resp) + return resp + + class _DeleteBackupPlanAssociation(BackupDRRestStub): + def __hash__(self): + return hash("DeleteBackupPlanAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplanassociation.DeleteBackupPlanAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup plan + association method over HTTP. + + Args: + request (~.backupplanassociation.DeleteBackupPlanAssociationRequest): + The request object. Request message for deleting a backup + plan association. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backupPlanAssociations/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup_plan_association( + request, metadata + ) + pb_request = backupplanassociation.DeleteBackupPlanAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_backup_plan_association(resp) + return resp + + class _DeleteBackupVault(BackupDRRestStub): + def __hash__(self): + return hash("DeleteBackupVault") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.DeleteBackupVaultRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup vault method over HTTP. + + Args: + request (~.backupvault.DeleteBackupVaultRequest): + The request object. Message for deleting a BackupVault. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup_vault( + request, metadata + ) + pb_request = backupvault.DeleteBackupVaultRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_backup_vault(resp) + return resp + + class _DeleteManagementServer(BackupDRRestStub): + def __hash__(self): + return hash("DeleteManagementServer") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupdr.DeleteManagementServerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete management server method over HTTP. + + Args: + request (~.backupdr.DeleteManagementServerRequest): + The request object. Request message for deleting a + management server instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/managementServers/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_management_server( + request, metadata + ) + pb_request = backupdr.DeleteManagementServerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_management_server(resp) + return resp + + class _FetchUsableBackupVaults(BackupDRRestStub): + def __hash__(self): + return hash("FetchUsableBackupVaults") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.FetchUsableBackupVaultsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.FetchUsableBackupVaultsResponse: + r"""Call the fetch usable backup + vaults method over HTTP. + + Args: + request (~.backupvault.FetchUsableBackupVaultsRequest): + The request object. Request message for fetching usable + BackupVaults. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.FetchUsableBackupVaultsResponse: + Response message for fetching usable + BackupVaults. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupVaults:fetchUsable", + }, + ] + request, metadata = self._interceptor.pre_fetch_usable_backup_vaults( + request, metadata + ) + pb_request = backupvault.FetchUsableBackupVaultsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.FetchUsableBackupVaultsResponse() + pb_resp = backupvault.FetchUsableBackupVaultsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_usable_backup_vaults(resp) + return resp + + class _GetBackup(BackupDRRestStub): + def __hash__(self): + return hash("GetBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.GetBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.Backup: + r"""Call the get backup method over HTTP. + + Args: + request (~.backupvault.GetBackupRequest): + The request object. Request message for getting a Backup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.Backup: + Message describing a Backup object. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}", + }, + ] + request, metadata = self._interceptor.pre_get_backup(request, metadata) + pb_request = backupvault.GetBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.Backup() + pb_resp = backupvault.Backup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup(resp) + return resp + + class _GetBackupPlan(BackupDRRestStub): + def __hash__(self): + return hash("GetBackupPlan") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplan.GetBackupPlanRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplan.BackupPlan: + r"""Call the get backup plan method over HTTP. + + Args: + request (~.backupplan.GetBackupPlanRequest): + The request object. The request message for getting a ``BackupPlan``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupplan.BackupPlan: + A ``BackupPlan`` specifies some common fields, such as + ``description`` as well as one or more ``BackupRule`` + messages. Each ``BackupRule`` has a retention policy and + defines a schedule by which the system is to perform + backup workloads. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupPlans/*}", + }, + ] + request, metadata = self._interceptor.pre_get_backup_plan(request, metadata) + pb_request = backupplan.GetBackupPlanRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupplan.BackupPlan() + pb_resp = backupplan.BackupPlan.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup_plan(resp) + return resp + + class _GetBackupPlanAssociation(BackupDRRestStub): + def __hash__(self): + return hash("GetBackupPlanAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplanassociation.GetBackupPlanAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplanassociation.BackupPlanAssociation: + r"""Call the get backup plan + association method over HTTP. + + Args: + request (~.backupplanassociation.GetBackupPlanAssociationRequest): + The request object. Request message for getting a + BackupPlanAssociation resource. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.backupplanassociation.BackupPlanAssociation: + A BackupPlanAssociation represents a + single BackupPlanAssociation which + contains details like workload, backup + plan etc + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupPlanAssociations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_backup_plan_association( + request, metadata + ) + pb_request = backupplanassociation.GetBackupPlanAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupplanassociation.BackupPlanAssociation() + pb_resp = backupplanassociation.BackupPlanAssociation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup_plan_association(resp) + return resp + + class _GetBackupVault(BackupDRRestStub): + def __hash__(self): + return hash("GetBackupVault") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.GetBackupVaultRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.BackupVault: + r"""Call the get backup vault method over HTTP. + + Args: + request (~.backupvault.GetBackupVaultRequest): + The request object. Request message for getting a + BackupVault. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.BackupVault: + Message describing a BackupVault + object. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*}", + }, + ] + request, metadata = self._interceptor.pre_get_backup_vault( + request, metadata + ) + pb_request = backupvault.GetBackupVaultRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.BackupVault() + pb_resp = backupvault.BackupVault.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup_vault(resp) + return resp + + class _GetDataSource(BackupDRRestStub): + def __hash__(self): + return hash("GetDataSource") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.GetDataSourceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.DataSource: + r"""Call the get data source method over HTTP. + + Args: + request (~.backupvault.GetDataSourceRequest): + The request object. Request message for getting a + DataSource instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.DataSource: + Message describing a DataSource + object. Datasource object used to + represent Datasource details for both + admin and basic view. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*}", + }, + ] + request, metadata = self._interceptor.pre_get_data_source(request, metadata) + pb_request = backupvault.GetDataSourceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.DataSource() + pb_resp = backupvault.DataSource.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_data_source(resp) + return resp + + class _GetManagementServer(BackupDRRestStub): + def __hash__(self): + return hash("GetManagementServer") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupdr.GetManagementServerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupdr.ManagementServer: + r"""Call the get management server method over HTTP. + + Args: + request (~.backupdr.GetManagementServerRequest): + The request object. Request message for getting a + management server instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupdr.ManagementServer: + ManagementServer describes a single + BackupDR ManagementServer instance. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/managementServers/*}", + }, + ] + request, metadata = self._interceptor.pre_get_management_server( + request, metadata + ) + pb_request = backupdr.GetManagementServerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupdr.ManagementServer() + pb_resp = backupdr.ManagementServer.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_management_server(resp) + return resp + + class _ListBackupPlanAssociations(BackupDRRestStub): + def __hash__(self): + return hash("ListBackupPlanAssociations") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplanassociation.ListBackupPlanAssociationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplanassociation.ListBackupPlanAssociationsResponse: + r"""Call the list backup plan + associations method over HTTP. + + Args: + request (~.backupplanassociation.ListBackupPlanAssociationsRequest): + The request object. Request message for List + BackupPlanAssociation + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.backupplanassociation.ListBackupPlanAssociationsResponse: + Response message for List + BackupPlanAssociation + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupPlanAssociations", + }, + ] + request, metadata = self._interceptor.pre_list_backup_plan_associations( + request, metadata + ) + pb_request = backupplanassociation.ListBackupPlanAssociationsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupplanassociation.ListBackupPlanAssociationsResponse() + pb_resp = backupplanassociation.ListBackupPlanAssociationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backup_plan_associations(resp) + return resp + + class _ListBackupPlans(BackupDRRestStub): + def __hash__(self): + return hash("ListBackupPlans") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplan.ListBackupPlansRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplan.ListBackupPlansResponse: + r"""Call the list backup plans method over HTTP. + + Args: + request (~.backupplan.ListBackupPlansRequest): + The request object. The request message for getting a list ``BackupPlan``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupplan.ListBackupPlansResponse: + The response message for getting a list of + ``BackupPlan``. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupPlans", + }, + ] + request, metadata = self._interceptor.pre_list_backup_plans( + request, metadata + ) + pb_request = backupplan.ListBackupPlansRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupplan.ListBackupPlansResponse() + pb_resp = backupplan.ListBackupPlansResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backup_plans(resp) + return resp + + class _ListBackups(BackupDRRestStub): + def __hash__(self): + return hash("ListBackups") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.ListBackupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.ListBackupsResponse: + r"""Call the list backups method over HTTP. + + Args: + request (~.backupvault.ListBackupsRequest): + The request object. Request message for listing Backups. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.ListBackupsResponse: + Response message for listing Backups. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/backupVaults/*/dataSources/*}/backups", + }, + ] + request, metadata = self._interceptor.pre_list_backups(request, metadata) + pb_request = backupvault.ListBackupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.ListBackupsResponse() + pb_resp = backupvault.ListBackupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backups(resp) + return resp + + class _ListBackupVaults(BackupDRRestStub): + def __hash__(self): + return hash("ListBackupVaults") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.ListBackupVaultsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.ListBackupVaultsResponse: + r"""Call the list backup vaults method over HTTP. + + Args: + request (~.backupvault.ListBackupVaultsRequest): + The request object. Request message for listing + backupvault stores. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.ListBackupVaultsResponse: + Response message for listing + BackupVaults. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupVaults", + }, + ] + request, metadata = self._interceptor.pre_list_backup_vaults( + request, metadata + ) + pb_request = backupvault.ListBackupVaultsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.ListBackupVaultsResponse() + pb_resp = backupvault.ListBackupVaultsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backup_vaults(resp) + return resp + + class _ListDataSources(BackupDRRestStub): + def __hash__(self): + return hash("ListDataSources") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.ListDataSourcesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.ListDataSourcesResponse: + r"""Call the list data sources method over HTTP. + + Args: + request (~.backupvault.ListDataSourcesRequest): + The request object. Request message for listing + DataSources. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.ListDataSourcesResponse: + Response message for listing + DataSources. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/backupVaults/*}/dataSources", + }, + ] + request, metadata = self._interceptor.pre_list_data_sources( + request, metadata + ) + pb_request = backupvault.ListDataSourcesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.ListDataSourcesResponse() + pb_resp = backupvault.ListDataSourcesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_data_sources(resp) + return resp + + class _ListManagementServers(BackupDRRestStub): + def __hash__(self): + return hash("ListManagementServers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupdr.ListManagementServersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupdr.ListManagementServersResponse: + r"""Call the list management servers method over HTTP. + + Args: + request (~.backupdr.ListManagementServersRequest): + The request object. Request message for listing + management servers. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupdr.ListManagementServersResponse: + Response message for listing + management servers. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/managementServers", + }, + ] + request, metadata = self._interceptor.pre_list_management_servers( + request, metadata + ) + pb_request = backupdr.ListManagementServersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupdr.ListManagementServersResponse() + pb_resp = backupdr.ListManagementServersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_management_servers(resp) + return resp + + class _RestoreBackup(BackupDRRestStub): + def __hash__(self): + return hash("RestoreBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.RestoreBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the restore backup method over HTTP. + + Args: + request (~.backupvault.RestoreBackupRequest): + The request object. Request message for restoring from a + Backup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}:restore", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_restore_backup(request, metadata) + pb_request = backupvault.RestoreBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) - # Return the client from cache. 
- return self._operations_client + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_restore_backup(resp) + return resp - class _CreateManagementServer(BackupDRRestStub): + class _TriggerBackup(BackupDRRestStub): def __hash__(self): - return hash("CreateManagementServer") + return hash("TriggerBackup") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "managementServerId": "", - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -572,18 +3393,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: backupdr.CreateManagementServerRequest, + request: backupplanassociation.TriggerBackupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the create management server method over HTTP. + r"""Call the trigger backup method over HTTP. Args: - request (~.backupdr.CreateManagementServerRequest): - The request object. Request message for creating a - management server instance. + request (~.backupplanassociation.TriggerBackupRequest): + The request object. Request message for triggering a + backup. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -601,14 +3422,12 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v1/{parent=projects/*/locations/*}/managementServers", - "body": "management_server", + "uri": "/v1/{name=projects/*/locations/*/backupPlanAssociations/*}:triggerBackup", + "body": "*", }, ] - request, metadata = self._interceptor.pre_create_management_server( - request, metadata - ) - pb_request = backupdr.CreateManagementServerRequest.pb(request) + request, metadata = self._interceptor.pre_trigger_backup(request, metadata) + pb_request = backupplanassociation.TriggerBackupRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -649,14 +3468,16 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_management_server(resp) + resp = self._interceptor.post_trigger_backup(resp) return resp - class _DeleteManagementServer(BackupDRRestStub): + class _UpdateBackup(BackupDRRestStub): def __hash__(self): - return hash("DeleteManagementServer") + return hash("UpdateBackup") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -668,18 +3489,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: backupdr.DeleteManagementServerRequest, + request: backupvault.UpdateBackupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the delete management server method over HTTP. + r"""Call the update backup method over HTTP. Args: - request (~.backupdr.DeleteManagementServerRequest): - The request object. Request message for deleting a - management server instance. 
+ request (~.backupvault.UpdateBackupRequest): + The request object. Request message for updating a + Backup. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -696,16 +3517,20 @@ def __call__( http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/managementServers/*}", + "method": "patch", + "uri": "/v1/{backup.name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}", + "body": "backup", }, ] - request, metadata = self._interceptor.pre_delete_management_server( - request, metadata - ) - pb_request = backupdr.DeleteManagementServerRequest.pb(request) + request, metadata = self._interceptor.pre_update_backup(request, metadata) + pb_request = backupvault.UpdateBackupRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -728,6 +3553,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -738,14 +3564,16 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_management_server(resp) + resp = self._interceptor.post_update_backup(resp) return resp - class _GetManagementServer(BackupDRRestStub): + class _UpdateBackupVault(BackupDRRestStub): def __hash__(self): - return hash("GetManagementServer") + return hash("UpdateBackupVault") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def 
_get_unset_required_fields(cls, message_dict): @@ -757,18 +3585,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: backupdr.GetManagementServerRequest, + request: backupvault.UpdateBackupVaultRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> backupdr.ManagementServer: - r"""Call the get management server method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the update backup vault method over HTTP. Args: - request (~.backupdr.GetManagementServerRequest): - The request object. Request message for getting a - management server instance. + request (~.backupvault.UpdateBackupVaultRequest): + The request object. Request message for updating a + BackupVault. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -776,24 +3604,31 @@ def __call__( sent along with the request as metadata. Returns: - ~.backupdr.ManagementServer: - ManagementServer describes a single - BackupDR ManagementServer instance. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/managementServers/*}", + "method": "patch", + "uri": "/v1/{backup_vault.name=projects/*/locations/*/backupVaults/*}", + "body": "backup_vault", }, ] - request, metadata = self._interceptor.pre_get_management_server( + request, metadata = self._interceptor.pre_update_backup_vault( request, metadata ) - pb_request = backupdr.GetManagementServerRequest.pb(request) + pb_request = backupvault.UpdateBackupVaultRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -816,6 +3651,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -824,18 +3660,18 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backupdr.ManagementServer() - pb_resp = backupdr.ManagementServer.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_management_server(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_backup_vault(resp) return resp - class _ListManagementServers(BackupDRRestStub): + class _UpdateDataSource(BackupDRRestStub): def __hash__(self): - return hash("ListManagementServers") + return hash("UpdateDataSource") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -847,18 +3683,18 @@ def _get_unset_required_fields(cls, 
message_dict): def __call__( self, - request: backupdr.ListManagementServersRequest, + request: backupvault.UpdateDataSourceRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> backupdr.ListManagementServersResponse: - r"""Call the list management servers method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the update data source method over HTTP. Args: - request (~.backupdr.ListManagementServersRequest): - The request object. Request message for listing - management servers. + request (~.backupvault.UpdateDataSourceRequest): + The request object. Request message for updating a data + source instance. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -866,24 +3702,31 @@ def __call__( sent along with the request as metadata. Returns: - ~.backupdr.ListManagementServersResponse: - Response message for listing - management servers. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{parent=projects/*/locations/*}/managementServers", + "method": "patch", + "uri": "/v1/{data_source.name=projects/*/locations/*/backupVaults/*/dataSources/*}", + "body": "data_source", }, ] - request, metadata = self._interceptor.pre_list_management_servers( + request, metadata = self._interceptor.pre_update_data_source( request, metadata ) - pb_request = backupdr.ListManagementServersRequest.pb(request) + pb_request = backupvault.UpdateDataSourceRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -906,6 +3749,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -914,13 +3758,38 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backupdr.ListManagementServersResponse() - pb_resp = backupdr.ListManagementServersResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_management_servers(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_data_source(resp) return resp + @property + def create_backup_plan( + self, + ) -> Callable[[backupplan.CreateBackupPlanRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateBackupPlan(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.CreateBackupPlanAssociationRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackupPlanAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_backup_vault( + self, + ) -> Callable[[backupvault.CreateBackupVaultRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackupVault(self._session, self._host, self._interceptor) # type: ignore + @property def create_management_server( self, @@ -929,6 +3798,41 @@ def create_management_server( # In C++ this would require a dynamic_cast return self._CreateManagementServer(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_backup( + self, + ) -> Callable[[backupvault.DeleteBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup_plan( + self, + ) -> Callable[[backupplan.DeleteBackupPlanRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteBackupPlan(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.DeleteBackupPlanAssociationRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackupPlanAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup_vault( + self, + ) -> Callable[[backupvault.DeleteBackupVaultRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackupVault(self._session, self._host, self._interceptor) # type: ignore + @property def delete_management_server( self, @@ -937,6 +3841,60 @@ def delete_management_server( # In C++ this would require a dynamic_cast return self._DeleteManagementServer(self._session, self._host, self._interceptor) # type: ignore + @property + def fetch_usable_backup_vaults( + self, + ) -> Callable[ + [backupvault.FetchUsableBackupVaultsRequest], + backupvault.FetchUsableBackupVaultsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchUsableBackupVaults(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup( + self, + ) -> Callable[[backupvault.GetBackupRequest], backupvault.Backup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup_plan( + self, + ) -> Callable[[backupplan.GetBackupPlanRequest], backupplan.BackupPlan]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackupPlan(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.GetBackupPlanAssociationRequest], + backupplanassociation.BackupPlanAssociation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackupPlanAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup_vault( + self, + ) -> Callable[[backupvault.GetBackupVaultRequest], backupvault.BackupVault]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackupVault(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_data_source( + self, + ) -> Callable[[backupvault.GetDataSourceRequest], backupvault.DataSource]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetDataSource(self._session, self._host, self._interceptor) # type: ignore + @property def get_management_server( self, @@ -945,6 +3903,55 @@ def get_management_server( # In C++ this would require a dynamic_cast return self._GetManagementServer(self._session, self._host, self._interceptor) # type: ignore + @property + def list_backup_plan_associations( + self, + ) -> Callable[ + [backupplanassociation.ListBackupPlanAssociationsRequest], + backupplanassociation.ListBackupPlanAssociationsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackupPlanAssociations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backup_plans( + self, + ) -> Callable[ + [backupplan.ListBackupPlansRequest], backupplan.ListBackupPlansResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackupPlans(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backups( + self, + ) -> Callable[[backupvault.ListBackupsRequest], backupvault.ListBackupsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backupvault.ListBackupVaultsRequest], backupvault.ListBackupVaultsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListBackupVaults(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_data_sources( + self, + ) -> Callable[ + [backupvault.ListDataSourcesRequest], backupvault.ListDataSourcesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDataSources(self._session, self._host, self._interceptor) # type: ignore + @property def list_management_servers( self, @@ -955,6 +3962,48 @@ def list_management_servers( # In C++ this would require a dynamic_cast return self._ListManagementServers(self._session, self._host, self._interceptor) # type: ignore + @property + def restore_backup( + self, + ) -> Callable[[backupvault.RestoreBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestoreBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def trigger_backup( + self, + ) -> Callable[ + [backupplanassociation.TriggerBackupRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TriggerBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backup( + self, + ) -> Callable[[backupvault.UpdateBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backup_vault( + self, + ) -> Callable[[backupvault.UpdateBackupVaultRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBackupVault(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_data_source( + self, + ) -> Callable[[backupvault.UpdateDataSourceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDataSource(self._session, self._host, self._interceptor) # type: ignore + @property def get_location(self): return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py index 3afc31268ba2..951186d655ee 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py @@ -26,6 +26,92 @@ WorkforceIdentityBasedManagementURI, WorkforceIdentityBasedOAuth2ClientID, ) +from .backupplan import ( + BackupPlan, + BackupRule, + BackupWindow, + CreateBackupPlanRequest, + DeleteBackupPlanRequest, + GetBackupPlanRequest, + ListBackupPlansRequest, + ListBackupPlansResponse, + StandardSchedule, + WeekDayOfMonth, +) +from .backupplanassociation import ( + BackupPlanAssociation, + CreateBackupPlanAssociationRequest, + DeleteBackupPlanAssociationRequest, + GetBackupPlanAssociationRequest, + ListBackupPlanAssociationsRequest, + ListBackupPlanAssociationsResponse, + RuleConfigInfo, + TriggerBackupRequest, +) +from 
.backupvault import ( + Backup, + BackupApplianceBackupConfig, + BackupApplianceLockInfo, + BackupConfigInfo, + BackupConfigState, + BackupLock, + BackupVault, + BackupVaultView, + BackupView, + CreateBackupVaultRequest, + DataSource, + DataSourceBackupApplianceApplication, + DataSourceGcpResource, + DeleteBackupRequest, + DeleteBackupVaultRequest, + FetchUsableBackupVaultsRequest, + FetchUsableBackupVaultsResponse, + GcpBackupConfig, + GcpResource, + GetBackupRequest, + GetBackupVaultRequest, + GetDataSourceRequest, + ListBackupsRequest, + ListBackupsResponse, + ListBackupVaultsRequest, + ListBackupVaultsResponse, + ListDataSourcesRequest, + ListDataSourcesResponse, + RestoreBackupRequest, + RestoreBackupResponse, + ServiceLockInfo, + TargetResource, + UpdateBackupRequest, + UpdateBackupVaultRequest, + UpdateDataSourceRequest, +) +from .backupvault_ba import BackupApplianceBackupProperties +from .backupvault_gce import ( + AcceleratorConfig, + AccessConfig, + AdvancedMachineFeatures, + AliasIpRange, + AllocationAffinity, + AttachedDisk, + ComputeInstanceBackupProperties, + ComputeInstanceDataSourceProperties, + ComputeInstanceRestoreProperties, + ComputeInstanceTargetEnvironment, + ConfidentialInstanceConfig, + CustomerEncryptionKey, + DisplayDevice, + Entry, + GuestOsFeature, + InstanceParams, + KeyRevocationActionType, + Metadata, + NetworkInterface, + NetworkPerformanceConfig, + Scheduling, + SchedulingDuration, + ServiceAccount, + Tags, +) __all__ = ( "CreateManagementServerRequest", @@ -39,4 +125,82 @@ "OperationMetadata", "WorkforceIdentityBasedManagementURI", "WorkforceIdentityBasedOAuth2ClientID", + "BackupPlan", + "BackupRule", + "BackupWindow", + "CreateBackupPlanRequest", + "DeleteBackupPlanRequest", + "GetBackupPlanRequest", + "ListBackupPlansRequest", + "ListBackupPlansResponse", + "StandardSchedule", + "WeekDayOfMonth", + "BackupPlanAssociation", + "CreateBackupPlanAssociationRequest", + "DeleteBackupPlanAssociationRequest", + 
"GetBackupPlanAssociationRequest", + "ListBackupPlanAssociationsRequest", + "ListBackupPlanAssociationsResponse", + "RuleConfigInfo", + "TriggerBackupRequest", + "Backup", + "BackupApplianceBackupConfig", + "BackupApplianceLockInfo", + "BackupConfigInfo", + "BackupLock", + "BackupVault", + "CreateBackupVaultRequest", + "DataSource", + "DataSourceBackupApplianceApplication", + "DataSourceGcpResource", + "DeleteBackupRequest", + "DeleteBackupVaultRequest", + "FetchUsableBackupVaultsRequest", + "FetchUsableBackupVaultsResponse", + "GcpBackupConfig", + "GcpResource", + "GetBackupRequest", + "GetBackupVaultRequest", + "GetDataSourceRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "RestoreBackupRequest", + "RestoreBackupResponse", + "ServiceLockInfo", + "TargetResource", + "UpdateBackupRequest", + "UpdateBackupVaultRequest", + "UpdateDataSourceRequest", + "BackupConfigState", + "BackupVaultView", + "BackupView", + "BackupApplianceBackupProperties", + "AcceleratorConfig", + "AccessConfig", + "AdvancedMachineFeatures", + "AliasIpRange", + "AllocationAffinity", + "AttachedDisk", + "ComputeInstanceBackupProperties", + "ComputeInstanceDataSourceProperties", + "ComputeInstanceRestoreProperties", + "ComputeInstanceTargetEnvironment", + "ConfidentialInstanceConfig", + "CustomerEncryptionKey", + "DisplayDevice", + "Entry", + "GuestOsFeature", + "InstanceParams", + "Metadata", + "NetworkInterface", + "NetworkPerformanceConfig", + "Scheduling", + "SchedulingDuration", + "ServiceAccount", + "Tags", + "KeyRevocationActionType", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py index 07ad09a753bc..2cbce001d7d5 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py +++ 
b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py @@ -195,7 +195,7 @@ class ManagementServer(proto.Message): oauth2_client_id (str): Output only. The OAuth 2.0 client id is required to make API calls to the BackupDR instance API of this ManagementServer. - This is the value that should be provided in the ‘aud’ field + This is the value that should be provided in the 'aud' field of the OIDC ID Token (see openid specification https://openid.net/specs/openid-connect-core-1_0.html#IDToken). workforce_identity_based_oauth2_client_id (google.cloud.backupdr_v1.types.WorkforceIdentityBasedOAuth2ClientID): @@ -350,10 +350,10 @@ class ListManagementServersRequest(proto.Message): parent (str): Required. The project and location for which to retrieve management servers information, in the format - ``projects/{project_id}/locations/{location}``. In Cloud - BackupDR, locations map to GCP regions, for example + 'projects/{project_id}/locations/{location}'. In Cloud + BackupDR, locations map to Google Cloud regions, for example **us-central1**. To retrieve management servers for all - locations, use "-" for the ``{location}`` value. + locations, use "-" for the '{location}' value. page_size (int): Optional. Requested page size. Server may return fewer items than requested. If @@ -401,15 +401,16 @@ class ListManagementServersResponse(proto.Message): Attributes: management_servers (MutableSequence[google.cloud.backupdr_v1.types.ManagementServer]): - The list of ManagementServer instances in the project for - the specified location. - - If the ``{location}`` value in the request is "-", the - response contains a list of instances from all locations. In - case any location is unreachable, the response will only - return management servers in reachable locations and the - 'unreachable' field will be populated with a list of - unreachable locations. + The list of ManagementServer instances in the + project for the specified location. 
+ + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. In case any location is + unreachable, the response will only return + management servers in reachable locations and + the 'unreachable' field will be populated with a + list of unreachable locations. next_page_token (str): A token identifying a page of results the server should return. @@ -443,7 +444,7 @@ class GetManagementServerRequest(proto.Message): name (str): Required. Name of the management server resource name, in the format - ``projects/{project_id}/locations/{location}/managementServers/{resource_name}`` + 'projects/{project_id}/locations/{location}/managementServers/{resource_name}' """ name: str = proto.Field( @@ -458,9 +459,9 @@ class CreateManagementServerRequest(proto.Message): Attributes: parent (str): Required. The management server project and location in the - format ``projects/{project_id}/locations/{location}``. In - Cloud Backup and DR locations map to GCP regions, for - example **us-central1**. + format 'projects/{project_id}/locations/{location}'. In + Cloud Backup and DR locations map to Google Cloud regions, + for example **us-central1**. management_server_id (str): Required. The name of the management server to create. The name must be unique for the @@ -571,7 +572,7 @@ class OperationMetadata(proto.Message): cancellation of the operation. Operations that have successfully been cancelled have [Operation.error][] value with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. + 1, corresponding to 'Code.CANCELLED'. api_version (str): Output only. API version used to start the operation. 
diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py new file mode 100644 index 000000000000..ca0eabb6d9f1 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py @@ -0,0 +1,644 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import month_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "BackupPlan", + "BackupRule", + "StandardSchedule", + "BackupWindow", + "WeekDayOfMonth", + "CreateBackupPlanRequest", + "ListBackupPlansRequest", + "ListBackupPlansResponse", + "GetBackupPlanRequest", + "DeleteBackupPlanRequest", + }, +) + + +class BackupPlan(proto.Message): + r"""A ``BackupPlan`` specifies some common fields, such as + ``description`` as well as one or more ``BackupRule`` messages. Each + ``BackupRule`` has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + Attributes: + name (str): + Output only. Identifier. The resource name of the + ``BackupPlan``. 
+ + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + description (str): + Optional. The description of the ``BackupPlan`` resource. + + The description allows for additional details about + ``BackupPlan`` and its use cases to be provided. An example + description is the following: "This is a backup plan that + performs a daily backup at 6pm and retains data for 3 + months". The description must be at most 2048 characters. + labels (MutableMapping[str, str]): + Optional. This collection of key/value pairs + allows for custom labels to be supplied by the + user. Example, {"tag": "Weekly"}. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. When the ``BackupPlan`` was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. When the ``BackupPlan`` was last updated. + backup_rules (MutableSequence[google.cloud.backupdr_v1.types.BackupRule]): + Required. The backup rules for this ``BackupPlan``. There + must be at least one ``BackupRule`` message. + state (google.cloud.backupdr_v1.types.BackupPlan.State): + Output only. The ``State`` for the ``BackupPlan``. + resource_type (str): + Required. The resource type to which the ``BackupPlan`` will + be applied. Examples include, + "compute.googleapis.com/Instance" and + "storage.googleapis.com/Bucket". + etag (str): + Optional. ``etag`` is returned from the service in the + response. As a user of the service, you may provide an etag + value in this field to prevent stale resources. + backup_vault (str): + Required. Resource name of backup vault which + will be used as storage location for backups. + Format: + + projects/{project}/locations/{location}/backupVaults/{backupvault} + backup_vault_service_account (str): + Output only. The Google Cloud Platform + Service Account to be used by the BackupVault + for taking backups. Specify the email address of + the Backup Vault Service Account. 
+ """ + + class State(proto.Enum): + r"""``State`` enumerates the possible states for a ``BackupPlan``. + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + The resource is being created. + ACTIVE (2): + The resource has been created and is fully + usable. + DELETING (3): + The resource is being deleted. + INACTIVE (4): + The resource has been created but is not + usable. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + INACTIVE = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + backup_rules: MutableSequence["BackupRule"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="BackupRule", + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + resource_type: str = proto.Field( + proto.STRING, + number=8, + ) + etag: str = proto.Field( + proto.STRING, + number=9, + ) + backup_vault: str = proto.Field( + proto.STRING, + number=10, + ) + backup_vault_service_account: str = proto.Field( + proto.STRING, + number=11, + ) + + +class BackupRule(proto.Message): + r"""``BackupRule`` binds the backup schedule to a retention policy. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + rule_id (str): + Required. Immutable. The unique id of this ``BackupRule``. + The ``rule_id`` is unique per ``BackupPlan``.The ``rule_id`` + must start with a lowercase letter followed by up to 62 + lowercase letters, numbers, or hyphens. Pattern, + /[a-z][a-z0-9-]{,62}/. + backup_retention_days (int): + Required. 
Configures the duration for which + backup data will be kept. It is defined in + “days”. The value should be greater than or + equal to minimum enforced retention of the + backup vault. + standard_schedule (google.cloud.backupdr_v1.types.StandardSchedule): + Required. Defines a schedule that runs within + the confines of a defined window of time. + + This field is a member of `oneof`_ ``backup_schedule_oneof``. + """ + + rule_id: str = proto.Field( + proto.STRING, + number=1, + ) + backup_retention_days: int = proto.Field( + proto.INT32, + number=4, + ) + standard_schedule: "StandardSchedule" = proto.Field( + proto.MESSAGE, + number=5, + oneof="backup_schedule_oneof", + message="StandardSchedule", + ) + + +class StandardSchedule(proto.Message): + r"""``StandardSchedule`` defines a schedule that run within the confines + of a defined window of days. We can define recurrence type for + schedule as HOURLY, DAILY, WEEKLY, MONTHLY or YEARLY. + + Attributes: + recurrence_type (google.cloud.backupdr_v1.types.StandardSchedule.RecurrenceType): + Required. Specifies the ``RecurrenceType`` for the schedule. + hourly_frequency (int): + Optional. Specifies frequency for hourly backups. A hourly + frequency of 2 means jobs will run every 2 hours from start + time till end time defined. + + This is required for ``recurrence_type``, ``HOURLY`` and is + not applicable otherwise. A validation error will occur if a + value is supplied and ``recurrence_type`` is not ``HOURLY``. + + Value of hourly frequency should be between 6 and 23. + + Reason for limit : We found that there is bandwidth + limitation of 3GB/S for GMI while taking a backup and 5GB/S + while doing a restore. Given the amount of parallel backups + and restore we are targeting, this will potentially take the + backup time to mins and hours (in worst case scenario). + days_of_week (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): + Optional. Specifies days of week like, MONDAY or TUESDAY, on + which jobs will run. 
+ + This is required for ``recurrence_type``, ``WEEKLY`` and is + not applicable otherwise. A validation error will occur if a + value is supplied and ``recurrence_type`` is not ``WEEKLY``. + days_of_month (MutableSequence[int]): + Optional. Specifies days of months like 1, 5, or 14 on which + jobs will run. + + Values for ``days_of_month`` are only applicable for + ``recurrence_type``, ``MONTHLY`` and ``YEARLY``. A + validation error will occur if other values are supplied. + week_day_of_month (google.cloud.backupdr_v1.types.WeekDayOfMonth): + Optional. Specifies a week day of the month like, FIRST + SUNDAY or LAST MONDAY, on which jobs will run. This will be + specified by two fields in ``WeekDayOfMonth``, one for the + day, e.g. ``MONDAY``, and one for the week, e.g. ``LAST``. + + This field is only applicable for ``recurrence_type``, + ``MONTHLY`` and ``YEARLY``. A validation error will occur if + other values are supplied. + months (MutableSequence[google.type.month_pb2.Month]): + Optional. Specifies the months of year, like ``FEBRUARY`` + and/or ``MAY``, on which jobs will run. + + This field is only applicable when ``recurrence_type`` is + ``YEARLY``. A validation error will occur if other values + are supplied. + backup_window (google.cloud.backupdr_v1.types.BackupWindow): + Required. A BackupWindow defines the window of day during + which backup jobs will run. Jobs are queued at the beginning + of the window and will be marked as ``NOT_RUN`` if they do + not start by the end of the window. + + Note: running jobs will not be cancelled at the end of the + window. + time_zone (str): + Required. The time zone to be used when interpreting the + schedule. The value of this field must be a time zone name + from the IANA tz database. See + https://en.wikipedia.org/wiki/List_of_tz_database_time_zones + for the list of valid timezone names. For e.g., + Europe/Paris. 
+ """ + + class RecurrenceType(proto.Enum): + r"""``RecurrenceTypes`` enumerates the applicable periodicity for the + schedule. + + Values: + RECURRENCE_TYPE_UNSPECIFIED (0): + recurrence type not set + HOURLY (1): + The ``BackupRule`` is to be applied hourly. + DAILY (2): + The ``BackupRule`` is to be applied daily. + WEEKLY (3): + The ``BackupRule`` is to be applied weekly. + MONTHLY (4): + The ``BackupRule`` is to be applied monthly. + YEARLY (5): + The ``BackupRule`` is to be applied yearly. + """ + RECURRENCE_TYPE_UNSPECIFIED = 0 + HOURLY = 1 + DAILY = 2 + WEEKLY = 3 + MONTHLY = 4 + YEARLY = 5 + + recurrence_type: RecurrenceType = proto.Field( + proto.ENUM, + number=1, + enum=RecurrenceType, + ) + hourly_frequency: int = proto.Field( + proto.INT32, + number=2, + ) + days_of_week: MutableSequence[dayofweek_pb2.DayOfWeek] = proto.RepeatedField( + proto.ENUM, + number=3, + enum=dayofweek_pb2.DayOfWeek, + ) + days_of_month: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=4, + ) + week_day_of_month: "WeekDayOfMonth" = proto.Field( + proto.MESSAGE, + number=5, + message="WeekDayOfMonth", + ) + months: MutableSequence[month_pb2.Month] = proto.RepeatedField( + proto.ENUM, + number=6, + enum=month_pb2.Month, + ) + backup_window: "BackupWindow" = proto.Field( + proto.MESSAGE, + number=7, + message="BackupWindow", + ) + time_zone: str = proto.Field( + proto.STRING, + number=8, + ) + + +class BackupWindow(proto.Message): + r"""``BackupWindow`` defines a window of the day during which backup + jobs will run. + + Attributes: + start_hour_of_day (int): + Required. The hour of day (0-23) when the + window starts for e.g. if value of start hour of + day is 6 that mean backup window start at 6:00. + end_hour_of_day (int): + Required. The hour of day (1-24) when the window end for + e.g. if value of end hour of day is 10 that mean backup + window end time is 10:00. + + End hour of day should be greater than start hour of day. 
0 + <= start_hour_of_day < end_hour_of_day <= 24 + + End hour of day is not include in backup window that mean if + end_hour_of_day= 10 jobs should start before 10:00. + """ + + start_hour_of_day: int = proto.Field( + proto.INT32, + number=1, + ) + end_hour_of_day: int = proto.Field( + proto.INT32, + number=2, + ) + + +class WeekDayOfMonth(proto.Message): + r"""``WeekDayOfMonth`` defines the week day of the month on which the + backups will run. The message combines a ``WeekOfMonth`` and + ``DayOfWeek`` to produce values like ``FIRST``/``MONDAY`` or + ``LAST``/``FRIDAY``. + + Attributes: + week_of_month (google.cloud.backupdr_v1.types.WeekDayOfMonth.WeekOfMonth): + Required. Specifies the week of the month. + day_of_week (google.type.dayofweek_pb2.DayOfWeek): + Required. Specifies the day of the week. + """ + + class WeekOfMonth(proto.Enum): + r"""``WeekOfMonth`` enumerates possible weeks in the month, e.g. the + first, third, or last week of the month. + + Values: + WEEK_OF_MONTH_UNSPECIFIED (0): + The zero value. Do not use. + FIRST (1): + The first week of the month. + SECOND (2): + The second week of the month. + THIRD (3): + The third week of the month. + FOURTH (4): + The fourth week of the month. + LAST (5): + The last week of the month. + """ + WEEK_OF_MONTH_UNSPECIFIED = 0 + FIRST = 1 + SECOND = 2 + THIRD = 3 + FOURTH = 4 + LAST = 5 + + week_of_month: WeekOfMonth = proto.Field( + proto.ENUM, + number=1, + enum=WeekOfMonth, + ) + day_of_week: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=2, + enum=dayofweek_pb2.DayOfWeek, + ) + + +class CreateBackupPlanRequest(proto.Message): + r"""The request message for creating a ``BackupPlan``. + + Attributes: + parent (str): + Required. The ``BackupPlan`` project and location in the + format ``projects/{project}/locations/{location}``. In Cloud + BackupDR locations map to GCP regions, for example + **us-central1**. + backup_plan_id (str): + Required. The name of the ``BackupPlan`` to create. 
The name + must be unique for the specified project and location.The + name must start with a lowercase letter followed by up to 62 + lowercase letters, numbers, or hyphens. Pattern, + /[a-z][a-z0-9-]{,62}/. + backup_plan (google.cloud.backupdr_v1.types.BackupPlan): + Required. The ``BackupPlan`` resource object to create. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and t he request times out. + If you make the request again with the same + request ID, the server can check if original + operation with the same request ID was received, + and if so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_plan_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup_plan: "BackupPlan" = proto.Field( + proto.MESSAGE, + number=3, + message="BackupPlan", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListBackupPlansRequest(proto.Message): + r"""The request message for getting a list ``BackupPlan``. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + ``BackupPlans`` information. Format: + ``projects/{project}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for e.g. + **us-central1**. To retrieve backup plans for all locations, + use "-" for the ``{location}`` value. + page_size (int): + Optional. The maximum number of ``BackupPlans`` to return in + a single response. 
If not specified, a default value will be + chosen by the service. Note that the response may include a + partial list and a caller should only rely on the response's + [next_page_token][google.cloud.backupdr.v1.ListBackupPlansResponse.next_page_token] + to determine if there are more instances left to be queried. + page_token (str): + Optional. The value of + [next_page_token][google.cloud.backupdr.v1.ListBackupPlansResponse.next_page_token] + received from a previous ``ListBackupPlans`` call. Provide + this to retrieve the subsequent page in a multi-page list of + results. When paginating, all other parameters provided to + ``ListBackupPlans`` must match the call that provided the + page token. + filter (str): + Optional. Field match expression used to + filter the results. + order_by (str): + Optional. Field by which to sort the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListBackupPlansResponse(proto.Message): + r"""The response message for getting a list of ``BackupPlan``. + + Attributes: + backup_plans (MutableSequence[google.cloud.backupdr_v1.types.BackupPlan]): + The list of ``BackupPlans`` in the project for the specified + location. + + If the ``{location}`` value in the request is "-", the + response contains a list of resources from all locations. In + case any location is unreachable, the response will only + return backup plans in reachable locations and the + 'unreachable' field will be populated with a list of + unreachable locations. BackupPlan + next_page_token (str): + A token which may be sent as + [page_token][google.cloud.backupdr.v1.ListBackupPlansRequest.page_token] + in a subsequent ``ListBackupPlans`` call to retrieve the + next page of results. 
If this field is omitted or empty, + then there are no more results to return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backup_plans: MutableSequence["BackupPlan"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupPlan", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupPlanRequest(proto.Message): + r"""The request message for getting a ``BackupPlan``. + + Attributes: + name (str): + Required. The resource name of the ``BackupPlan`` to + retrieve. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteBackupPlanRequest(proto.Message): + r"""The request message for deleting a ``BackupPlan``. + + Attributes: + name (str): + Required. The resource name of the ``BackupPlan`` to delete. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplanassociation.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplanassociation.py new file mode 100644 index 000000000000..23a4309a3fd8 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplanassociation.py @@ -0,0 +1,454 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "BackupPlanAssociation", + "RuleConfigInfo", + "CreateBackupPlanAssociationRequest", + "ListBackupPlanAssociationsRequest", + "ListBackupPlanAssociationsResponse", + "GetBackupPlanAssociationRequest", + "DeleteBackupPlanAssociationRequest", + "TriggerBackupRequest", + }, +) + + +class BackupPlanAssociation(proto.Message): + r"""A BackupPlanAssociation represents a single + BackupPlanAssociation which contains details like workload, + backup plan etc + + Attributes: + name (str): + Output only. Identifier. 
The resource name of + BackupPlanAssociation in below format Format : + + projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId} + resource_type (str): + Optional. Resource type of workload on which + backupplan is applied + resource (str): + Required. Immutable. Resource name of + workload on which backupplan is applied + backup_plan (str): + Required. Resource name of backup plan which + needs to be applied on workload. Format: + + projects/{project}/locations/{location}/backupPlans/{backupPlanId} + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + updated. + state (google.cloud.backupdr_v1.types.BackupPlanAssociation.State): + Output only. The BackupPlanAssociation + resource state. + rules_config_info (MutableSequence[google.cloud.backupdr_v1.types.RuleConfigInfo]): + Output only. The config info related to + backup rules. + data_source (str): + Output only. Output Only. + + Resource name of data source which will be used + as storage location for backups taken. + Format : + + projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource} + """ + + class State(proto.Enum): + r"""Enum for State of BackupPlan Association + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + The resource is being created. + ACTIVE (2): + The resource has been created and is fully + usable. + DELETING (3): + The resource is being deleted. + INACTIVE (4): + The resource has been created but is not + usable. 
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + INACTIVE = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + resource_type: str = proto.Field( + proto.STRING, + number=2, + ) + resource: str = proto.Field( + proto.STRING, + number=3, + ) + backup_plan: str = proto.Field( + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + rules_config_info: MutableSequence["RuleConfigInfo"] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="RuleConfigInfo", + ) + data_source: str = proto.Field( + proto.STRING, + number=9, + ) + + +class RuleConfigInfo(proto.Message): + r"""Message for rules config info. + + Attributes: + rule_id (str): + Output only. Output Only. + + Backup Rule id fetched from backup plan. + last_backup_state (google.cloud.backupdr_v1.types.RuleConfigInfo.LastBackupState): + Output only. The last backup state for rule. + last_backup_error (google.rpc.status_pb2.Status): + Output only. Output Only. + + google.rpc.Status object to store the last + backup error. + last_successful_backup_consistency_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The point in time when the last + successful backup was captured from the source. + """ + + class LastBackupState(proto.Enum): + r"""Enum for LastBackupState + + Values: + LAST_BACKUP_STATE_UNSPECIFIED (0): + State not set. + FIRST_BACKUP_PENDING (1): + The first backup is pending. + PERMISSION_DENIED (2): + The most recent backup could not be + run/failed because of the lack of permissions. + SUCCEEDED (3): + The last backup operation succeeded. + FAILED (4): + The last backup operation failed. 
+ """ + LAST_BACKUP_STATE_UNSPECIFIED = 0 + FIRST_BACKUP_PENDING = 1 + PERMISSION_DENIED = 2 + SUCCEEDED = 3 + FAILED = 4 + + rule_id: str = proto.Field( + proto.STRING, + number=1, + ) + last_backup_state: LastBackupState = proto.Field( + proto.ENUM, + number=3, + enum=LastBackupState, + ) + last_backup_error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + last_successful_backup_consistency_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class CreateBackupPlanAssociationRequest(proto.Message): + r"""Request message for creating a backup plan. + + Attributes: + parent (str): + Required. The backup plan association project and location + in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR locations map to GCP regions, for example + **us-central1**. + backup_plan_association_id (str): + Required. The name of the backup plan + association to create. The name must be unique + for the specified project and location. + backup_plan_association (google.cloud.backupdr_v1.types.BackupPlanAssociation): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and t he request times out. + If you make the request again with the same + request ID, the server can check if original + operation with the same request ID was received, + and if so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. 
+ + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_plan_association_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup_plan_association: "BackupPlanAssociation" = proto.Field( + proto.MESSAGE, + number=3, + message="BackupPlanAssociation", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListBackupPlanAssociationsRequest(proto.Message): + r"""Request message for List BackupPlanAssociation + + Attributes: + parent (str): + Required. The project and location for which to retrieve + backup Plan Associations information, in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for example + **us-central1**. To retrieve backup plan associations for + all locations, use "-" for the ``{location}`` value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListBackupPlanAssociationsResponse(proto.Message): + r"""Response message for List BackupPlanAssociation + + Attributes: + backup_plan_associations (MutableSequence[google.cloud.backupdr_v1.types.BackupPlanAssociation]): + The list of Backup Plan Associations in the project for the + specified location. + + If the ``{location}`` value in the request is "-", the + response contains a list of instances from all locations. 
In + case any location is unreachable, the response will only + return backup plan associations in reachable locations and + the 'unreachable' field will be populated with a list of + unreachable locations. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backup_plan_associations: MutableSequence[ + "BackupPlanAssociation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupPlanAssociation", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupPlanAssociationRequest(proto.Message): + r"""Request message for getting a BackupPlanAssociation resource. + + Attributes: + name (str): + Required. Name of the backup plan association resource, in + the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteBackupPlanAssociationRequest(proto.Message): + r"""Request message for deleting a backup plan association. + + Attributes: + name (str): + Required. Name of the backup plan association resource, in + the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. 
If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class TriggerBackupRequest(proto.Message): + r"""Request message for triggering a backup. + + Attributes: + name (str): + Required. Name of the backup plan association resource, in + the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + rule_id (str): + Required. backup rule_id for which a backup needs to be + triggered. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + rule_id: str = proto.Field( + proto.STRING, + number=2, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py new file mode 100644 index 000000000000..ced3cd195702 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py @@ -0,0 +1,2065 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.backupdr_v1.types import backupvault_ba, backupvault_gce + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "BackupConfigState", + "BackupView", + "BackupVaultView", + "BackupVault", + "DataSource", + "BackupConfigInfo", + "GcpBackupConfig", + "BackupApplianceBackupConfig", + "DataSourceGcpResource", + "DataSourceBackupApplianceApplication", + "ServiceLockInfo", + "BackupApplianceLockInfo", + "BackupLock", + "Backup", + "CreateBackupVaultRequest", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "FetchUsableBackupVaultsRequest", + "FetchUsableBackupVaultsResponse", + "GetBackupVaultRequest", + "UpdateBackupVaultRequest", + "DeleteBackupVaultRequest", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "GetDataSourceRequest", + "UpdateDataSourceRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "GetBackupRequest", + "UpdateBackupRequest", + "DeleteBackupRequest", + "RestoreBackupRequest", + "RestoreBackupResponse", + "TargetResource", + "GcpResource", + }, +) + + +class BackupConfigState(proto.Enum): + r"""Backup configuration state. Is the resource configured for + backup? + + Values: + BACKUP_CONFIG_STATE_UNSPECIFIED (0): + The possible states of backup configuration. + Status not set. + ACTIVE (1): + The data source is actively protected (i.e. 
+ there is a BackupPlanAssociation or Appliance + SLA pointing to it) + PASSIVE (2): + The data source is no longer protected (but + may have backups under it) + """ + BACKUP_CONFIG_STATE_UNSPECIFIED = 0 + ACTIVE = 1 + PASSIVE = 2 + + +class BackupView(proto.Enum): + r"""BackupView contains enum options for Partial and Full view. + + Values: + BACKUP_VIEW_UNSPECIFIED (0): + If the value is not set, the default 'FULL' + view is used. + BACKUP_VIEW_BASIC (1): + Includes basic data about the Backup, but not + the full contents. + BACKUP_VIEW_FULL (2): + Includes all data about the Backup. + This is the default value (for both ListBackups + and GetBackup). + """ + BACKUP_VIEW_UNSPECIFIED = 0 + BACKUP_VIEW_BASIC = 1 + BACKUP_VIEW_FULL = 2 + + +class BackupVaultView(proto.Enum): + r"""BackupVaultView contains enum options for Partial and Full + view. + + Values: + BACKUP_VAULT_VIEW_UNSPECIFIED (0): + If the value is not set, the default 'FULL' + view is used. + BACKUP_VAULT_VIEW_BASIC (1): + Includes basic data about the Backup Vault, + but not the full contents. + BACKUP_VAULT_VIEW_FULL (2): + Includes all data about the Backup Vault. + This is the default value (for both + ListBackupVaults and GetBackupVault). + """ + BACKUP_VAULT_VIEW_UNSPECIFIED = 0 + BACKUP_VAULT_VIEW_BASIC = 1 + BACKUP_VAULT_VIEW_FULL = 2 + + +class BackupVault(proto.Message): + r"""Message describing a BackupVault object. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. Name of the backup vault to create. + It must have the + format\ ``"projects/{project}/locations/{location}/backupVaults/{backupvault}"``. + ``{backupvault}`` cannot be changed after creation. It must + be between 3-63 characters long and must be unique within + the project and location. + description (str): + Optional. The description of the BackupVault + instance (2048 characters or less). 
+
+            This field is a member of `oneof`_ ``_description``.
+        labels (MutableMapping[str, str]):
+            Optional. Resource labels to represent user
+            provided metadata. No labels currently defined:
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the instance was
+            created.
+
+            This field is a member of `oneof`_ ``_create_time``.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the instance was
+            updated.
+
+            This field is a member of `oneof`_ ``_update_time``.
+        backup_minimum_enforced_retention_duration (google.protobuf.duration_pb2.Duration):
+            Required. The default and minimum enforced
+            retention for each backup within the backup
+            vault. The enforced retention for each backup
+            can be extended.
+
+            This field is a member of `oneof`_ ``_backup_minimum_enforced_retention_duration``.
+        deletable (bool):
+            Output only. Set to true when there are no
+            backups nested under this resource.
+
+            This field is a member of `oneof`_ ``_deletable``.
+        etag (str):
+            Optional. Server specified ETag for the
+            backup vault resource to prevent simultaneous
+            updates from overwriting each other.
+
+            This field is a member of `oneof`_ ``_etag``.
+        state (google.cloud.backupdr_v1.types.BackupVault.State):
+            Output only. The BackupVault resource
+            instance state.
+        effective_time (google.protobuf.timestamp_pb2.Timestamp):
+            Optional. Time after which the BackupVault
+            resource is locked.
+
+            This field is a member of `oneof`_ ``_effective_time``.
+        backup_count (int):
+            Output only. The number of backups in this
+            backup vault.
+        service_account (str):
+            Output only. Service account used by the
+            BackupVault Service for this BackupVault. The
+            user should grant this account permissions in
+            their workload project to enable the service to
+            run backups and restores there.
+        total_stored_bytes (int):
+            Output only. Total size of the storage used
+            by all backup resources.
+        uid (str):
+            Output only. 
Output only + Immutable after resource creation until resource + deletion. + annotations (MutableMapping[str, str]): + Optional. User annotations. See + https://google.aip.dev/128#annotations Stores + small amounts of arbitrary data. + access_restriction (google.cloud.backupdr_v1.types.BackupVault.AccessRestriction): + Optional. Note: This field is added for future use case and + will not be supported in the current release. + + Optional. + + Access restriction for the backup vault. Default value is + WITHIN_ORGANIZATION if not provided during creation. + """ + + class State(proto.Enum): + r"""Holds the state of the backup vault resource. + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + The backup vault is being created. + ACTIVE (2): + The backup vault has been created and is + fully usable. + DELETING (3): + The backup vault is being deleted. + ERROR (4): + The backup vault is experiencing an issue and + might be unusable. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + ERROR = 4 + + class AccessRestriction(proto.Enum): + r"""Holds the access restriction for the backup vault. + + Values: + ACCESS_RESTRICTION_UNSPECIFIED (0): + Access restriction not set. + WITHIN_PROJECT (1): + Access to or from resources outside your + current project will be denied. + WITHIN_ORGANIZATION (2): + Access to or from resources outside your + current organization will be denied. + UNRESTRICTED (3): + No access restriction. 
+ """ + ACCESS_RESTRICTION_UNSPECIFIED = 0 + WITHIN_PROJECT = 1 + WITHIN_ORGANIZATION = 2 + UNRESTRICTED = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + optional=True, + message=timestamp_pb2.Timestamp, + ) + backup_minimum_enforced_retention_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=20, + optional=True, + message=duration_pb2.Duration, + ) + deletable: bool = proto.Field( + proto.BOOL, + number=8, + optional=True, + ) + etag: str = proto.Field( + proto.STRING, + number=9, + optional=True, + ) + state: State = proto.Field( + proto.ENUM, + number=10, + enum=State, + ) + effective_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=12, + optional=True, + message=timestamp_pb2.Timestamp, + ) + backup_count: int = proto.Field( + proto.INT64, + number=17, + ) + service_account: str = proto.Field( + proto.STRING, + number=18, + ) + total_stored_bytes: int = proto.Field( + proto.INT64, + number=19, + ) + uid: str = proto.Field( + proto.STRING, + number=21, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=22, + ) + access_restriction: AccessRestriction = proto.Field( + proto.ENUM, + number=24, + enum=AccessRestriction, + ) + + +class DataSource(proto.Message): + r"""Message describing a DataSource object. + Datasource object used to represent Datasource details for both + admin and basic view. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        name (str):
+            Output only. Identifier. Name of the datasource to create.
+            It must have the
+            format\ ``"projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}"``.
+            ``{datasource}`` cannot be changed after creation. It must
+            be between 3-63 characters long and must be unique within
+            the backup vault.
+        state (google.cloud.backupdr_v1.types.DataSource.State):
+            Output only. The DataSource resource instance
+            state.
+        labels (MutableMapping[str, str]):
+            Optional. Resource labels to represent user
+            provided metadata. No labels currently defined:
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the instance was
+            created.
+
+            This field is a member of `oneof`_ ``_create_time``.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the instance was
+            updated.
+
+            This field is a member of `oneof`_ ``_update_time``.
+        backup_count (int):
+            Number of backups in the data source.
+
+            This field is a member of `oneof`_ ``_backup_count``.
+        etag (str):
+            Server specified ETag for the
+            ManagementServer resource to prevent
+            simultaneous updates from overwriting each other.
+
+            This field is a member of `oneof`_ ``_etag``.
+        total_stored_bytes (int):
+            The number of bytes (metadata and data)
+            stored in this datasource.
+
+            This field is a member of `oneof`_ ``_total_stored_bytes``.
+        config_state (google.cloud.backupdr_v1.types.BackupConfigState):
+            Output only. The backup configuration state.
+        backup_config_info (google.cloud.backupdr_v1.types.BackupConfigInfo):
+            Output only. Details of how the resource is
+            configured for backup.
+ data_source_gcp_resource (google.cloud.backupdr_v1.types.DataSourceGcpResource): + The backed up resource is a Google Cloud + resource. The word 'DataSource' was included in + the names to indicate that this is the + representation of the Google Cloud resource used + within the DataSource object. + + This field is a member of `oneof`_ ``source_resource``. + data_source_backup_appliance_application (google.cloud.backupdr_v1.types.DataSourceBackupApplianceApplication): + The backed up resource is a backup appliance + application. + + This field is a member of `oneof`_ ``source_resource``. + """ + + class State(proto.Enum): + r"""Holds the state of the data source resource. + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + The data source is being created. + ACTIVE (2): + The data source has been created and is fully + usable. + DELETING (3): + The data source is being deleted. + ERROR (4): + The data source is experiencing an issue and + might be unusable. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + ERROR = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + state: State = proto.Field( + proto.ENUM, + number=21, + enum=State, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + optional=True, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message=timestamp_pb2.Timestamp, + ) + backup_count: int = proto.Field( + proto.INT64, + number=7, + optional=True, + ) + etag: str = proto.Field( + proto.STRING, + number=14, + optional=True, + ) + total_stored_bytes: int = proto.Field( + proto.INT64, + number=23, + optional=True, + ) + config_state: "BackupConfigState" = proto.Field( + proto.ENUM, + number=24, + enum="BackupConfigState", + ) + backup_config_info: "BackupConfigInfo" = 
proto.Field( + proto.MESSAGE, + number=25, + message="BackupConfigInfo", + ) + data_source_gcp_resource: "DataSourceGcpResource" = proto.Field( + proto.MESSAGE, + number=26, + oneof="source_resource", + message="DataSourceGcpResource", + ) + data_source_backup_appliance_application: "DataSourceBackupApplianceApplication" = ( + proto.Field( + proto.MESSAGE, + number=27, + oneof="source_resource", + message="DataSourceBackupApplianceApplication", + ) + ) + + +class BackupConfigInfo(proto.Message): + r"""BackupConfigInfo has information about how the resource is + configured for Backup and about the most recent backup to this + vault. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + last_backup_state (google.cloud.backupdr_v1.types.BackupConfigInfo.LastBackupState): + Output only. The status of the last backup to + this BackupVault + last_successful_backup_consistency_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. If the last backup were + successful, this field has the consistency date. + last_backup_error (google.rpc.status_pb2.Status): + Output only. If the last backup failed, this + field has the error message. + gcp_backup_config (google.cloud.backupdr_v1.types.GcpBackupConfig): + Configuration for a Google Cloud resource. + + This field is a member of `oneof`_ ``backup_config``. + backup_appliance_backup_config (google.cloud.backupdr_v1.types.BackupApplianceBackupConfig): + Configuration for an application backed up by + a Backup Appliance. + + This field is a member of `oneof`_ ``backup_config``. 
+ """ + + class LastBackupState(proto.Enum): + r"""LastBackupstate tracks whether the last backup was not yet + started, successful, failed, or could not be run because of the + lack of permissions. + + Values: + LAST_BACKUP_STATE_UNSPECIFIED (0): + Status not set. + FIRST_BACKUP_PENDING (1): + The first backup has not yet completed + SUCCEEDED (2): + The most recent backup was successful + FAILED (3): + The most recent backup failed + PERMISSION_DENIED (4): + The most recent backup could not be + run/failed because of the lack of permissions + """ + LAST_BACKUP_STATE_UNSPECIFIED = 0 + FIRST_BACKUP_PENDING = 1 + SUCCEEDED = 2 + FAILED = 3 + PERMISSION_DENIED = 4 + + last_backup_state: LastBackupState = proto.Field( + proto.ENUM, + number=1, + enum=LastBackupState, + ) + last_successful_backup_consistency_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + last_backup_error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=3, + message=status_pb2.Status, + ) + gcp_backup_config: "GcpBackupConfig" = proto.Field( + proto.MESSAGE, + number=4, + oneof="backup_config", + message="GcpBackupConfig", + ) + backup_appliance_backup_config: "BackupApplianceBackupConfig" = proto.Field( + proto.MESSAGE, + number=5, + oneof="backup_config", + message="BackupApplianceBackupConfig", + ) + + +class GcpBackupConfig(proto.Message): + r"""GcpBackupConfig captures the Backup configuration details for + Google Cloud resources. All Google Cloud resources regardless of + type are protected with backup plan associations. + + Attributes: + backup_plan (str): + The name of the backup plan. + backup_plan_description (str): + The description of the backup plan. + backup_plan_association (str): + The name of the backup plan association. 
+ backup_plan_rules (MutableSequence[str]): + The names of the backup plan rules which + point to this backupvault + """ + + backup_plan: str = proto.Field( + proto.STRING, + number=1, + ) + backup_plan_description: str = proto.Field( + proto.STRING, + number=2, + ) + backup_plan_association: str = proto.Field( + proto.STRING, + number=3, + ) + backup_plan_rules: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + +class BackupApplianceBackupConfig(proto.Message): + r"""BackupApplianceBackupConfig captures the backup configuration + for applications that are protected by Backup Appliances. + + Attributes: + backup_appliance_name (str): + The name of the backup appliance. + backup_appliance_id (int): + The ID of the backup appliance. + sla_id (int): + The ID of the SLA of this application. + application_name (str): + The name of the application. + host_name (str): + The name of the host where the application is + running. + slt_name (str): + The name of the SLT associated with the + application. + slp_name (str): + The name of the SLP associated with the + application. + """ + + backup_appliance_name: str = proto.Field( + proto.STRING, + number=1, + ) + backup_appliance_id: int = proto.Field( + proto.INT64, + number=2, + ) + sla_id: int = proto.Field( + proto.INT64, + number=3, + ) + application_name: str = proto.Field( + proto.STRING, + number=4, + ) + host_name: str = proto.Field( + proto.STRING, + number=5, + ) + slt_name: str = proto.Field( + proto.STRING, + number=6, + ) + slp_name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class DataSourceGcpResource(proto.Message): + r"""DataSourceGcpResource is used for protected resources that + are Google Cloud Resources. This name is easeier to understand + than GcpResourceDataSource or GcpDataSourceResource + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcp_resourcename (str): + Output only. 
Full resource pathname URL of + the source Google Cloud resource. + location (str): + Location of the resource: + //"global"/"unspecified". + type_ (str): + The type of the Google Cloud resource. Use + the Unified Resource Type, eg. + compute.googleapis.com/Instance. + compute_instance_datasource_properties (google.cloud.backupdr_v1.types.ComputeInstanceDataSourceProperties): + ComputeInstanceDataSourceProperties has a + subset of Compute Instance properties that are + useful at the Datasource level. + + This field is a member of `oneof`_ ``gcp_resource_properties``. + """ + + gcp_resourcename: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + type_: str = proto.Field( + proto.STRING, + number=3, + ) + compute_instance_datasource_properties: backupvault_gce.ComputeInstanceDataSourceProperties = proto.Field( + proto.MESSAGE, + number=4, + oneof="gcp_resource_properties", + message=backupvault_gce.ComputeInstanceDataSourceProperties, + ) + + +class DataSourceBackupApplianceApplication(proto.Message): + r"""BackupApplianceApplication describes a Source Resource when + it is an application backed up by a BackupAppliance. + + Attributes: + application_name (str): + The name of the Application as known to the + Backup Appliance. + backup_appliance (str): + Appliance name. + appliance_id (int): + Appliance Id of the Backup Appliance. + type_ (str): + The type of the application. e.g. VMBackup + application_id (int): + The appid field of the application within the + Backup Appliance. + hostname (str): + Hostname of the host where the application is + running. + host_id (int): + Hostid of the application host. 
+ """ + + application_name: str = proto.Field( + proto.STRING, + number=1, + ) + backup_appliance: str = proto.Field( + proto.STRING, + number=2, + ) + appliance_id: int = proto.Field( + proto.INT64, + number=3, + ) + type_: str = proto.Field( + proto.STRING, + number=4, + ) + application_id: int = proto.Field( + proto.INT64, + number=8, + ) + hostname: str = proto.Field( + proto.STRING, + number=6, + ) + host_id: int = proto.Field( + proto.INT64, + number=7, + ) + + +class ServiceLockInfo(proto.Message): + r"""ServiceLockInfo represents the details of a lock taken by the + service on a Backup resource. + + Attributes: + operation (str): + Output only. The name of the operation that + created this lock. The lock will automatically + be released when the operation completes. + """ + + operation: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BackupApplianceLockInfo(proto.Message): + r"""BackupApplianceLockInfo contains metadata about the + backupappliance that created the lock. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backup_appliance_id (int): + Required. The ID of the backup/recovery + appliance that created this lock. + backup_appliance_name (str): + Required. The name of the backup/recovery + appliance that created this lock. + lock_reason (str): + Required. The reason for the lock: e.g. + MOUNT/RESTORE/BACKUP/etc. The value of this + string is only meaningful to the client and it + is not interpreted by the BackupVault service. + job_name (str): + The job name on the backup/recovery appliance + that created this lock. + + This field is a member of `oneof`_ ``lock_source``. + backup_image (str): + The image name that depends on this Backup. 
+ + This field is a member of `oneof`_ ``lock_source``. + sla_id (int): + The SLA on the backup/recovery appliance that + owns the lock. + + This field is a member of `oneof`_ ``lock_source``. + """ + + backup_appliance_id: int = proto.Field( + proto.INT64, + number=1, + ) + backup_appliance_name: str = proto.Field( + proto.STRING, + number=2, + ) + lock_reason: str = proto.Field( + proto.STRING, + number=5, + ) + job_name: str = proto.Field( + proto.STRING, + number=6, + oneof="lock_source", + ) + backup_image: str = proto.Field( + proto.STRING, + number=7, + oneof="lock_source", + ) + sla_id: int = proto.Field( + proto.INT64, + number=8, + oneof="lock_source", + ) + + +class BackupLock(proto.Message): + r"""BackupLock represents a single lock on a Backup resource. An + unexpired lock on a Backup prevents the Backup from being + deleted. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + lock_until_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The time after which this lock is + not considered valid and will no longer protect + the Backup from deletion. + backup_appliance_lock_info (google.cloud.backupdr_v1.types.BackupApplianceLockInfo): + If the client is a backup and recovery + appliance, this contains metadata about why the + lock exists. + + This field is a member of `oneof`_ ``ClientLockInfo``. + service_lock_info (google.cloud.backupdr_v1.types.ServiceLockInfo): + Output only. Contains metadata about the lock + exist for Google Cloud native backups. + + This field is a member of `oneof`_ ``ClientLockInfo``. 
+ """ + + lock_until_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + backup_appliance_lock_info: "BackupApplianceLockInfo" = proto.Field( + proto.MESSAGE, + number=3, + oneof="ClientLockInfo", + message="BackupApplianceLockInfo", + ) + service_lock_info: "ServiceLockInfo" = proto.Field( + proto.MESSAGE, + number=4, + oneof="ClientLockInfo", + message="ServiceLockInfo", + ) + + +class Backup(proto.Message): + r"""Message describing a Backup object. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. Name of the backup to create. It + must have the + format\ ``"projects//locations//backupVaults//dataSources/{datasource}/backups/{backup}"``. + ``{backup}`` cannot be changed after creation. It must be + between 3-63 characters long and must be unique within the + datasource. + description (str): + Output only. The description of the Backup + instance (2048 characters or less). + + This field is a member of `oneof`_ ``_description``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + created. + + This field is a member of `oneof`_ ``_create_time``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + updated. + + This field is a member of `oneof`_ ``_update_time``. + labels (MutableMapping[str, str]): + Optional. Resource labels to represent user + provided metadata. No labels currently defined. + enforced_retention_end_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The backup can not be deleted + before this time. 
+ + This field is a member of `oneof`_ ``_enforced_retention_end_time``. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. When this backup is automatically + expired. + + This field is a member of `oneof`_ ``_expire_time``. + consistency_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The point in time when this + backup was captured from the source. + + This field is a member of `oneof`_ ``_consistency_time``. + etag (str): + Optional. Server specified ETag to prevent + updates from overwriting each other. + + This field is a member of `oneof`_ ``_etag``. + state (google.cloud.backupdr_v1.types.Backup.State): + Output only. The Backup resource instance + state. + service_locks (MutableSequence[google.cloud.backupdr_v1.types.BackupLock]): + Output only. The list of BackupLocks taken by + the service to prevent the deletion of the + backup. + backup_appliance_locks (MutableSequence[google.cloud.backupdr_v1.types.BackupLock]): + Optional. The list of BackupLocks taken by + the accessor Backup Appliance. + compute_instance_backup_properties (google.cloud.backupdr_v1.types.ComputeInstanceBackupProperties): + Output only. Compute Engine specific backup + properties. + + This field is a member of `oneof`_ ``backup_properties``. + backup_appliance_backup_properties (google.cloud.backupdr_v1.types.BackupApplianceBackupProperties): + Output only. Backup Appliance specific backup + properties. + + This field is a member of `oneof`_ ``backup_properties``. + backup_type (google.cloud.backupdr_v1.types.Backup.BackupType): + Output only. Type of the backup, unspecified, + scheduled or ondemand. + gcp_backup_plan_info (google.cloud.backupdr_v1.types.Backup.GCPBackupPlanInfo): + Output only. Configuration for a Google Cloud + resource. + + This field is a member of `oneof`_ ``plan_info``. + resource_size_bytes (int): + Output only. source resource size in bytes at + the time of the backup. 
+ """ + + class State(proto.Enum): + r"""Holds the state of the backup resource. + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + The backup is being created. + ACTIVE (2): + The backup has been created and is fully + usable. + DELETING (3): + The backup is being deleted. + ERROR (4): + The backup is experiencing an issue and might + be unusable. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + ERROR = 4 + + class BackupType(proto.Enum): + r"""Type of the backup, scheduled or ondemand. + + Values: + BACKUP_TYPE_UNSPECIFIED (0): + Backup type is unspecified. + SCHEDULED (1): + Scheduled backup. + ON_DEMAND (2): + On demand backup. + """ + BACKUP_TYPE_UNSPECIFIED = 0 + SCHEDULED = 1 + ON_DEMAND = 2 + + class GCPBackupPlanInfo(proto.Message): + r"""GCPBackupPlanInfo captures the plan configuration details of + Google Cloud resources at the time of backup. + + Attributes: + backup_plan (str): + Resource name of backup plan by which + workload is protected at the time of the backup. 
+ Format: + + projects/{project}/locations/{location}/backupPlans/{backupPlanId} + backup_plan_rule_id (str): + The rule id of the backup plan which + triggered this backup in case of scheduled + backup or used for + """ + + backup_plan: str = proto.Field( + proto.STRING, + number=1, + ) + backup_plan_rule_id: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + enforced_retention_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + optional=True, + message=timestamp_pb2.Timestamp, + ) + consistency_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + optional=True, + message=timestamp_pb2.Timestamp, + ) + etag: str = proto.Field( + proto.STRING, + number=11, + optional=True, + ) + state: State = proto.Field( + proto.ENUM, + number=15, + enum=State, + ) + service_locks: MutableSequence["BackupLock"] = proto.RepeatedField( + proto.MESSAGE, + number=17, + message="BackupLock", + ) + backup_appliance_locks: MutableSequence["BackupLock"] = proto.RepeatedField( + proto.MESSAGE, + number=18, + message="BackupLock", + ) + compute_instance_backup_properties: backupvault_gce.ComputeInstanceBackupProperties = proto.Field( + proto.MESSAGE, + number=19, + oneof="backup_properties", + message=backupvault_gce.ComputeInstanceBackupProperties, + ) + 
backup_appliance_backup_properties: backupvault_ba.BackupApplianceBackupProperties = proto.Field( + proto.MESSAGE, + number=21, + oneof="backup_properties", + message=backupvault_ba.BackupApplianceBackupProperties, + ) + backup_type: BackupType = proto.Field( + proto.ENUM, + number=20, + enum=BackupType, + ) + gcp_backup_plan_info: GCPBackupPlanInfo = proto.Field( + proto.MESSAGE, + number=22, + oneof="plan_info", + message=GCPBackupPlanInfo, + ) + resource_size_bytes: int = proto.Field( + proto.INT64, + number=23, + ) + + +class CreateBackupVaultRequest(proto.Message): + r"""Message for creating a BackupVault. + + Attributes: + parent (str): + Required. Value for parent. + backup_vault_id (str): + Required. ID of the requesting object If auto-generating ID + server-side, remove this field and backup_vault_id from the + method_signature of Create RPC + backup_vault (google.cloud.backupdr_v1.types.BackupVault): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is 'false'. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_vault_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup_vault: "BackupVault" = proto.Field( + proto.MESSAGE, + number=3, + message="BackupVault", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class ListBackupVaultsRequest(proto.Message): + r"""Request message for listing backupvault stores. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve backupvault stores for + all locations, use "-" for the '{location}' value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. + view (google.cloud.backupdr_v1.types.BackupVaultView): + Optional. Reserved for future use to provide + a BASIC & FULL view of Backup Vault. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + view: "BackupVaultView" = proto.Field( + proto.ENUM, + number=6, + enum="BackupVaultView", + ) + + +class ListBackupVaultsResponse(proto.Message): + r"""Response message for listing BackupVaults. 
+ + Attributes: + backup_vaults (MutableSequence[google.cloud.backupdr_v1.types.BackupVault]): + The list of BackupVault instances in the + project for the specified location. + + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. In case any location is + unreachable, the response will only return + backup vaults in reachable locations and the + 'unreachable' field will be populated with a + list of unreachable locations. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backup_vaults: MutableSequence["BackupVault"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupVault", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class FetchUsableBackupVaultsRequest(proto.Message): + r"""Request message for fetching usable BackupVaults. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve backupvault stores for + all locations, use "-" for the '{location}' value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class FetchUsableBackupVaultsResponse(proto.Message): + r"""Response message for fetching usable BackupVaults. + + Attributes: + backup_vaults (MutableSequence[google.cloud.backupdr_v1.types.BackupVault]): + The list of BackupVault instances in the + project for the specified location. + + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. In case any location is + unreachable, the response will only return + backup vaults in reachable locations and the + 'unreachable' field will be populated with a + list of unreachable locations. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backup_vaults: MutableSequence["BackupVault"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupVault", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupVaultRequest(proto.Message): + r"""Request message for getting a BackupVault. + + Attributes: + name (str): + Required. Name of the backupvault store resource name, in + the format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}' + view (google.cloud.backupdr_v1.types.BackupVaultView): + Optional. 
Reserved for future use to provide + a BASIC & FULL view of Backup Vault + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: "BackupVaultView" = proto.Field( + proto.ENUM, + number=2, + enum="BackupVaultView", + ) + + +class UpdateBackupVaultRequest(proto.Message): + r"""Request message for updating a BackupVault. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the BackupVault resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then the request will fail. + backup_vault (google.cloud.backupdr_v1.types.BackupVault): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is 'false'. + force (bool): + Optional. If set to true, will not check plan + duration against backup vault enforcement + duration. 
+ """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + backup_vault: "BackupVault" = proto.Field( + proto.MESSAGE, + number=2, + message="BackupVault", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + force: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class DeleteBackupVaultRequest(proto.Message): + r"""Message for deleting a BackupVault. + + Attributes: + name (str): + Required. Name of the resource. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + force (bool): + Optional. If set to true, any data source + from this backup vault will also be deleted. + etag (str): + The current etag of the backup vault. + If an etag is provided and does not match the + current etag of the connection, deletion will be + blocked. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is 'false'. + allow_missing (bool): + Optional. If true and the BackupVault is not + found, the request will succeed but no action + will be taken. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + etag: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class ListDataSourcesRequest(proto.Message): + r"""Request message for listing DataSources. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + data sources information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve data sources for all + locations, use "-" for the '{location}' value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDataSourcesResponse(proto.Message): + r"""Response message for listing DataSources. + + Attributes: + data_sources (MutableSequence[google.cloud.backupdr_v1.types.DataSource]): + The list of DataSource instances in the + project for the specified location. + + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. 
In case any location is
+            unreachable, the response will only return data
+            sources in reachable locations and the
+            'unreachable' field will be populated with a
+            list of unreachable locations.
+        next_page_token (str):
+            A token identifying a page of results the
+            server should return.
+        unreachable (MutableSequence[str]):
+            Locations that could not be reached.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    data_sources: MutableSequence["DataSource"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="DataSource",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    unreachable: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class GetDataSourceRequest(proto.Message):
+    r"""Request message for getting a DataSource instance.
+
+    Attributes:
+        name (str):
+            Required. Name of the data source resource name, in the
+            format
+            'projects/{project_id}/locations/{location}/backupVaults/{resource_name}/dataSources/{resource_name}'
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class UpdateDataSourceRequest(proto.Message):
+    r"""Request message for updating a data source instance.
+
+    Attributes:
+        update_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Required. Field mask is used to specify the fields to be
+            overwritten in the DataSource resource by the update. The
+            fields specified in the update_mask are relative to the
+            resource, not the full request. A field will be overwritten
+            if it is in the mask. If the user does not provide a mask
+            then the request will fail.
+        data_source (google.cloud.backupdr_v1.types.DataSource):
+            Required. The resource being updated
+        request_id (str):
+            Optional. An optional request ID to identify
+            requests. Specify a unique request ID so that if
+            you must retry your request, the server will
+            know to ignore the request if it has already
+            been completed.
The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + allow_missing (bool): + Optional. Enable upsert. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + data_source: "DataSource" = proto.Field( + proto.MESSAGE, + number=2, + message="DataSource", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class ListBackupsRequest(proto.Message): + r"""Request message for listing Backups. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + backup information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve data sources for all + locations, use "-" for the '{location}' value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. + view (google.cloud.backupdr_v1.types.BackupView): + Optional. Reserved for future use to provide + a BASIC & FULL view of Backup resource. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + view: "BackupView" = proto.Field( + proto.ENUM, + number=6, + enum="BackupView", + ) + + +class ListBackupsResponse(proto.Message): + r"""Response message for listing Backups. + + Attributes: + backups (MutableSequence[google.cloud.backupdr_v1.types.Backup]): + The list of Backup instances in the project + for the specified location. + + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. In case any location is + unreachable, the response will only return data + sources in reachable locations and the + 'unreachable' field will be populated with a + list of unreachable locations. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backups: MutableSequence["Backup"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Backup", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupRequest(proto.Message): + r"""Request message for getting a Backup. + + Attributes: + name (str): + Required. Name of the data source resource name, in the + format + 'projects/{project_id}/locations/{location}/backupVaults/{backupVault}/dataSources/{datasource}/backups/{backup}' + view (google.cloud.backupdr_v1.types.BackupView): + Optional. Reserved for future use to provide + a BASIC & FULL view of Backup resource. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: "BackupView" = proto.Field( + proto.ENUM, + number=2, + enum="BackupView", + ) + + +class UpdateBackupRequest(proto.Message): + r"""Request message for updating a Backup. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Backup resource by the update. The fields + specified in the update_mask are relative to the resource, + not the full request. A field will be overwritten if it is + in the mask. If the user does not provide a mask then the + request will fail. + backup (google.cloud.backupdr_v1.types.Backup): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + backup: "Backup" = proto.Field( + proto.MESSAGE, + number=2, + message="Backup", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteBackupRequest(proto.Message): + r"""Message for deleting a Backup. + + Attributes: + name (str): + Required. Name of the resource. 
+ request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RestoreBackupRequest(proto.Message): + r"""Request message for restoring from a Backup. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The resource name of the Backup instance, in the + format + 'projects/*/locations/*/backupVaults/*/dataSources/*/backups/'. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. 
+ + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + compute_instance_target_environment (google.cloud.backupdr_v1.types.ComputeInstanceTargetEnvironment): + Compute Engine target environment to be used + during restore. + + This field is a member of `oneof`_ ``target_environment``. + compute_instance_restore_properties (google.cloud.backupdr_v1.types.ComputeInstanceRestoreProperties): + Compute Engine instance properties to be + overridden during restore. + + This field is a member of `oneof`_ ``instance_properties``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + compute_instance_target_environment: backupvault_gce.ComputeInstanceTargetEnvironment = proto.Field( + proto.MESSAGE, + number=3, + oneof="target_environment", + message=backupvault_gce.ComputeInstanceTargetEnvironment, + ) + compute_instance_restore_properties: backupvault_gce.ComputeInstanceRestoreProperties = proto.Field( + proto.MESSAGE, + number=4, + oneof="instance_properties", + message=backupvault_gce.ComputeInstanceRestoreProperties, + ) + + +class RestoreBackupResponse(proto.Message): + r"""Response message for restoring from a Backup. + + Attributes: + target_resource (google.cloud.backupdr_v1.types.TargetResource): + Details of the target resource + created/modified as part of restore. + """ + + target_resource: "TargetResource" = proto.Field( + proto.MESSAGE, + number=1, + message="TargetResource", + ) + + +class TargetResource(proto.Message): + r"""Details of the target resource created/modified as part of + restore. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcp_resource (google.cloud.backupdr_v1.types.GcpResource): + Details of the native Google Cloud resource + created as part of restore. 
+
+            This field is a member of `oneof`_ ``target_resource_info``.
+    """
+
+    gcp_resource: "GcpResource" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        oneof="target_resource_info",
+        message="GcpResource",
+    )
+
+
+class GcpResource(proto.Message):
+    r"""Minimum details to identify a Google Cloud resource
+
+    Attributes:
+        gcp_resourcename (str):
+            Name of the Google Cloud resource.
+        location (str):
+            Location of the resource:
+            <region>/<zone>/"global"/"unspecified".
+        type_ (str):
+            Type of the resource. Use the Unified
+            Resource Type, eg.
+            compute.googleapis.com/Instance.
+    """
+
+    gcp_resourcename: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    location: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    type_: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_ba.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_ba.py
new file mode 100644
index 000000000000..131f54b56abe
--- /dev/null
+++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_ba.py
@@ -0,0 +1,87 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "BackupApplianceBackupProperties", + }, +) + + +class BackupApplianceBackupProperties(proto.Message): + r"""BackupApplianceBackupProperties represents BackupDR backup + appliance's properties. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + generation_id (int): + Output only. The numeric generation ID of the + backup (monotonically increasing). + + This field is a member of `oneof`_ ``_generation_id``. + finalize_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when this backup object + was finalized (if none, backup is not + finalized). + + This field is a member of `oneof`_ ``_finalize_time``. + recovery_range_start_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The earliest timestamp of data + available in this Backup. + + This field is a member of `oneof`_ ``_recovery_range_start_time``. + recovery_range_end_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The latest timestamp of data + available in this Backup. + + This field is a member of `oneof`_ ``_recovery_range_end_time``. 
+ """ + + generation_id: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + finalize_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message=timestamp_pb2.Timestamp, + ) + recovery_range_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=timestamp_pb2.Timestamp, + ) + recovery_range_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_gce.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_gce.py new file mode 100644 index 000000000000..9e3e98632644 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_gce.py @@ -0,0 +1,1991 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "KeyRevocationActionType", + "ComputeInstanceBackupProperties", + "ComputeInstanceRestoreProperties", + "ComputeInstanceTargetEnvironment", + "ComputeInstanceDataSourceProperties", + "AdvancedMachineFeatures", + "ConfidentialInstanceConfig", + "DisplayDevice", + "AcceleratorConfig", + "CustomerEncryptionKey", + "Entry", + "Metadata", + "NetworkInterface", + "NetworkPerformanceConfig", + "AccessConfig", + "AliasIpRange", + "InstanceParams", + "AllocationAffinity", + "Scheduling", + "SchedulingDuration", + "ServiceAccount", + "Tags", + "AttachedDisk", + "GuestOsFeature", + }, +) + + +class KeyRevocationActionType(proto.Enum): + r"""Specifies whether the virtual machine instance will be shut + down on key revocation. It is currently used in instance, + instance properties and GMI protos + + Values: + KEY_REVOCATION_ACTION_TYPE_UNSPECIFIED (0): + Default value. This value is unused. + NONE (1): + Indicates user chose no operation. + STOP (2): + Indicates user chose to opt for VM shutdown + on key revocation. + """ + KEY_REVOCATION_ACTION_TYPE_UNSPECIFIED = 0 + NONE = 1 + STOP = 2 + + +class ComputeInstanceBackupProperties(proto.Message): + r"""ComputeInstanceBackupProperties represents Compute Engine + instance backup properties. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + description (str): + An optional text description for the + instances that are created from these + properties. + + This field is a member of `oneof`_ ``_description``. + tags (google.cloud.backupdr_v1.types.Tags): + A list of tags to apply to the instances that + are created from these properties. The tags + identify valid sources or targets for network + firewalls. 
The setTags method can modify this + list of tags. Each tag within the list must + comply with RFC1035 + (https://www.ietf.org/rfc/rfc1035.txt). + + This field is a member of `oneof`_ ``_tags``. + machine_type (str): + The machine type to use for instances that + are created from these properties. + + This field is a member of `oneof`_ ``_machine_type``. + can_ip_forward (bool): + Enables instances created based on these properties to send + packets with source IP addresses other than their own and + receive packets with destination IP addresses other than + their own. If these instances will be used as an IP gateway + or it will be set as the next-hop in a Route resource, + specify ``true``. If unsure, leave this set to ``false``. + See the + https://cloud.google.com/vpc/docs/using-routes#canipforward + documentation for more information. + + This field is a member of `oneof`_ ``_can_ip_forward``. + network_interface (MutableSequence[google.cloud.backupdr_v1.types.NetworkInterface]): + An array of network access configurations for + this interface. + disk (MutableSequence[google.cloud.backupdr_v1.types.AttachedDisk]): + An array of disks that are associated with + the instances that are created from these + properties. + metadata (google.cloud.backupdr_v1.types.Metadata): + The metadata key/value pairs to assign to + instances that are created from these + properties. These pairs can consist of custom + metadata or predefined keys. See + https://cloud.google.com/compute/docs/metadata/overview + for more information. + + This field is a member of `oneof`_ ``_metadata``. + service_account (MutableSequence[google.cloud.backupdr_v1.types.ServiceAccount]): + A list of service accounts with specified + scopes. Access tokens for these service accounts + are available to the instances that are created + from these properties. Use metadata queries to + obtain the access tokens for these instances. 
+ scheduling (google.cloud.backupdr_v1.types.Scheduling): + Specifies the scheduling options for the + instances that are created from these + properties. + + This field is a member of `oneof`_ ``_scheduling``. + guest_accelerator (MutableSequence[google.cloud.backupdr_v1.types.AcceleratorConfig]): + A list of guest accelerator cards' type and + count to use for instances created from these + properties. + min_cpu_platform (str): + Minimum cpu/platform to be used by instances. The instance + may be scheduled on the specified or newer cpu/platform. + Applicable values are the friendly names of CPU platforms, + such as ``minCpuPlatform: Intel Haswell`` or + ``minCpuPlatform: Intel Sandy Bridge``. For more + information, read + https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform. + + This field is a member of `oneof`_ ``_min_cpu_platform``. + key_revocation_action_type (google.cloud.backupdr_v1.types.KeyRevocationActionType): + KeyRevocationActionType of the instance. + Supported options are "STOP" and "NONE". The + default value is "NONE" if it is not specified. + + This field is a member of `oneof`_ ``_key_revocation_action_type``. + source_instance (str): + The source instance used to create this + backup. This can be a partial or full URL to the + resource. For example, the following are valid + values: + + -https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/instance + -projects/project/zones/zone/instances/instance + + This field is a member of `oneof`_ ``_source_instance``. + labels (MutableMapping[str, str]): + Labels to apply to instances that are created + from these properties. 
+ """ + + description: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + tags: "Tags" = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message="Tags", + ) + machine_type: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + can_ip_forward: bool = proto.Field( + proto.BOOL, + number=4, + optional=True, + ) + network_interface: MutableSequence["NetworkInterface"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="NetworkInterface", + ) + disk: MutableSequence["AttachedDisk"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="AttachedDisk", + ) + metadata: "Metadata" = proto.Field( + proto.MESSAGE, + number=7, + optional=True, + message="Metadata", + ) + service_account: MutableSequence["ServiceAccount"] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="ServiceAccount", + ) + scheduling: "Scheduling" = proto.Field( + proto.MESSAGE, + number=9, + optional=True, + message="Scheduling", + ) + guest_accelerator: MutableSequence["AcceleratorConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="AcceleratorConfig", + ) + min_cpu_platform: str = proto.Field( + proto.STRING, + number=11, + optional=True, + ) + key_revocation_action_type: "KeyRevocationActionType" = proto.Field( + proto.ENUM, + number=12, + optional=True, + enum="KeyRevocationActionType", + ) + source_instance: str = proto.Field( + proto.STRING, + number=13, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=14, + ) + + +class ComputeInstanceRestoreProperties(proto.Message): + r"""ComputeInstanceRestoreProperties represents Compute Engine + instance properties to be overridden during restore. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. Name of the compute instance. + + This field is a member of `oneof`_ ``_name``. 
+ advanced_machine_features (google.cloud.backupdr_v1.types.AdvancedMachineFeatures): + Optional. Controls for advanced + machine-related behavior features. + + This field is a member of `oneof`_ ``_advanced_machine_features``. + can_ip_forward (bool): + Optional. Allows this instance to send and + receive packets with non-matching destination or + source IPs. + + This field is a member of `oneof`_ ``_can_ip_forward``. + confidential_instance_config (google.cloud.backupdr_v1.types.ConfidentialInstanceConfig): + Optional. Controls Confidential compute + options on the instance + + This field is a member of `oneof`_ ``_confidential_instance_config``. + deletion_protection (bool): + Optional. Whether the resource should be + protected against deletion. + + This field is a member of `oneof`_ ``_deletion_protection``. + description (str): + Optional. An optional description of this + resource. Provide this property when you create + the resource. + + This field is a member of `oneof`_ ``_description``. + disks (MutableSequence[google.cloud.backupdr_v1.types.AttachedDisk]): + Optional. Array of disks associated with this + instance. Persistent disks must be created + before you can assign them. + display_device (google.cloud.backupdr_v1.types.DisplayDevice): + Optional. Enables display device for the + instance. + + This field is a member of `oneof`_ ``_display_device``. + guest_accelerators (MutableSequence[google.cloud.backupdr_v1.types.AcceleratorConfig]): + Optional. A list of the type and count of + accelerator cards attached to the instance. + hostname (str): + Optional. Specifies the hostname of the instance. The + specified hostname must be RFC1035 compliant. If hostname is + not specified, the default hostname is + [INSTANCE_NAME].c.[PROJECT_ID].internal when using the + global DNS, and + [INSTANCE_NAME].[ZONE].c.[PROJECT_ID].internal when using + zonal DNS. + + This field is a member of `oneof`_ ``_hostname``. 
+ instance_encryption_key (google.cloud.backupdr_v1.types.CustomerEncryptionKey): + Optional. Encrypts suspended data for an + instance with a customer-managed encryption key. + + This field is a member of `oneof`_ ``_instance_encryption_key``. + key_revocation_action_type (google.cloud.backupdr_v1.types.KeyRevocationActionType): + Optional. KeyRevocationActionType of the + instance. + + This field is a member of `oneof`_ ``_key_revocation_action_type``. + labels (MutableMapping[str, str]): + Optional. Labels to apply to this instance. + machine_type (str): + Optional. Full or partial URL of the machine + type resource to use for this instance. + + This field is a member of `oneof`_ ``_machine_type``. + metadata (google.cloud.backupdr_v1.types.Metadata): + Optional. This includes custom metadata and + predefined keys. + + This field is a member of `oneof`_ ``_metadata``. + min_cpu_platform (str): + Optional. Minimum CPU platform to use for + this instance. + + This field is a member of `oneof`_ ``_min_cpu_platform``. + network_interfaces (MutableSequence[google.cloud.backupdr_v1.types.NetworkInterface]): + Optional. An array of network configurations + for this instance. These specify how interfaces + are configured to interact with other network + services, such as connecting to the internet. + Multiple interfaces are supported per instance. + network_performance_config (google.cloud.backupdr_v1.types.NetworkPerformanceConfig): + Optional. Configure network performance such + as egress bandwidth tier. + + This field is a member of `oneof`_ ``_network_performance_config``. + params (google.cloud.backupdr_v1.types.InstanceParams): + Input only. Additional params passed with the + request, but not persisted as part of resource + payload. + + This field is a member of `oneof`_ ``_params``. + private_ipv6_google_access (google.cloud.backupdr_v1.types.ComputeInstanceRestoreProperties.InstancePrivateIpv6GoogleAccess): + Optional. 
The private IPv6 google access type for the VM. If + not specified, use INHERIT_FROM_SUBNETWORK as default. + + This field is a member of `oneof`_ ``_private_ipv6_google_access``. + allocation_affinity (google.cloud.backupdr_v1.types.AllocationAffinity): + Optional. Specifies the reservations that + this instance can consume from. + + This field is a member of `oneof`_ ``_allocation_affinity``. + resource_policies (MutableSequence[str]): + Optional. Resource policies applied to this + instance. + scheduling (google.cloud.backupdr_v1.types.Scheduling): + Optional. Sets the scheduling options for + this instance. + + This field is a member of `oneof`_ ``_scheduling``. + service_accounts (MutableSequence[google.cloud.backupdr_v1.types.ServiceAccount]): + Optional. A list of service accounts, with + their specified scopes, authorized for this + instance. Only one service account per VM + instance is supported. + tags (google.cloud.backupdr_v1.types.Tags): + Optional. Tags to apply to this instance. + Tags are used to identify valid sources or + targets for network firewalls and are specified + by the client during instance creation. + + This field is a member of `oneof`_ ``_tags``. + """ + + class InstancePrivateIpv6GoogleAccess(proto.Enum): + r"""The private IPv6 google access type for the VMs. + + Values: + INSTANCE_PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED (0): + Default value. This value is unused. + INHERIT_FROM_SUBNETWORK (1): + Each network interface inherits + PrivateIpv6GoogleAccess from its subnetwork. + ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE (2): + Outbound private IPv6 access from VMs in this + subnet to Google services. If specified, the + subnetwork who is attached to the instance's + default network interface will be assigned an + internal IPv6 prefix if it doesn't have before. + ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE (3): + Bidirectional private IPv6 access to/from + Google services. 
If specified, the subnetwork + who is attached to the instance's default + network interface will be assigned an internal + IPv6 prefix if it doesn't have before. + """ + INSTANCE_PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED = 0 + INHERIT_FROM_SUBNETWORK = 1 + ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE = 2 + ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + advanced_machine_features: "AdvancedMachineFeatures" = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message="AdvancedMachineFeatures", + ) + can_ip_forward: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + confidential_instance_config: "ConfidentialInstanceConfig" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="ConfidentialInstanceConfig", + ) + deletion_protection: bool = proto.Field( + proto.BOOL, + number=5, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=6, + optional=True, + ) + disks: MutableSequence["AttachedDisk"] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message="AttachedDisk", + ) + display_device: "DisplayDevice" = proto.Field( + proto.MESSAGE, + number=8, + optional=True, + message="DisplayDevice", + ) + guest_accelerators: MutableSequence["AcceleratorConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message="AcceleratorConfig", + ) + hostname: str = proto.Field( + proto.STRING, + number=10, + optional=True, + ) + instance_encryption_key: "CustomerEncryptionKey" = proto.Field( + proto.MESSAGE, + number=11, + optional=True, + message="CustomerEncryptionKey", + ) + key_revocation_action_type: "KeyRevocationActionType" = proto.Field( + proto.ENUM, + number=12, + optional=True, + enum="KeyRevocationActionType", + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=13, + ) + machine_type: str = proto.Field( + proto.STRING, + number=14, + optional=True, + ) + metadata: "Metadata" = 
proto.Field( + proto.MESSAGE, + number=15, + optional=True, + message="Metadata", + ) + min_cpu_platform: str = proto.Field( + proto.STRING, + number=16, + optional=True, + ) + network_interfaces: MutableSequence["NetworkInterface"] = proto.RepeatedField( + proto.MESSAGE, + number=17, + message="NetworkInterface", + ) + network_performance_config: "NetworkPerformanceConfig" = proto.Field( + proto.MESSAGE, + number=18, + optional=True, + message="NetworkPerformanceConfig", + ) + params: "InstanceParams" = proto.Field( + proto.MESSAGE, + number=19, + optional=True, + message="InstanceParams", + ) + private_ipv6_google_access: InstancePrivateIpv6GoogleAccess = proto.Field( + proto.ENUM, + number=20, + optional=True, + enum=InstancePrivateIpv6GoogleAccess, + ) + allocation_affinity: "AllocationAffinity" = proto.Field( + proto.MESSAGE, + number=21, + optional=True, + message="AllocationAffinity", + ) + resource_policies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=22, + ) + scheduling: "Scheduling" = proto.Field( + proto.MESSAGE, + number=23, + optional=True, + message="Scheduling", + ) + service_accounts: MutableSequence["ServiceAccount"] = proto.RepeatedField( + proto.MESSAGE, + number=24, + message="ServiceAccount", + ) + tags: "Tags" = proto.Field( + proto.MESSAGE, + number=26, + optional=True, + message="Tags", + ) + + +class ComputeInstanceTargetEnvironment(proto.Message): + r"""ComputeInstanceTargetEnvironment represents Compute Engine + target environment to be used during restore. + + Attributes: + project (str): + Required. Target project for the Compute + Engine instance. + zone (str): + Required. The zone of the Compute Engine + instance. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ComputeInstanceDataSourceProperties(proto.Message): + r"""ComputeInstanceDataSourceProperties represents the properties + of a ComputeEngine resource that are stored in the DataSource. + + Attributes: + name (str): + Name of the compute instance backed up by the + datasource. + description (str): + The description of the Compute Engine + instance. + machine_type (str): + The machine type of the instance. + total_disk_count (int): + The total number of disks attached to the + Instance. + total_disk_size_gb (int): + The sum of all the disk sizes. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + machine_type: str = proto.Field( + proto.STRING, + number=3, + ) + total_disk_count: int = proto.Field( + proto.INT64, + number=4, + ) + total_disk_size_gb: int = proto.Field( + proto.INT64, + number=5, + ) + + +class AdvancedMachineFeatures(proto.Message): + r"""Specifies options for controlling advanced machine features. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable_nested_virtualization (bool): + Optional. Whether to enable nested + virtualization or not (default is false). + + This field is a member of `oneof`_ ``_enable_nested_virtualization``. + threads_per_core (int): + Optional. The number of threads per physical + core. To disable simultaneous multithreading + (SMT) set this to 1. If unset, the maximum + number of threads supported per core by the + underlying processor is assumed. + + This field is a member of `oneof`_ ``_threads_per_core``. + visible_core_count (int): + Optional. The number of physical cores to + expose to an instance. Multiply by the number of + threads per core to compute the total number of + virtual CPUs to expose to the instance. 
If + unset, the number of cores is inferred from the + instance's nominal CPU count and the underlying + platform's SMT width. + + This field is a member of `oneof`_ ``_visible_core_count``. + enable_uefi_networking (bool): + Optional. Whether to enable UEFI networking + for instance creation. + + This field is a member of `oneof`_ ``_enable_uefi_networking``. + """ + + enable_nested_virtualization: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + threads_per_core: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + visible_core_count: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + enable_uefi_networking: bool = proto.Field( + proto.BOOL, + number=4, + optional=True, + ) + + +class ConfidentialInstanceConfig(proto.Message): + r"""A set of Confidential Instance options. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable_confidential_compute (bool): + Optional. Defines whether the instance should + have confidential compute enabled. + + This field is a member of `oneof`_ ``_enable_confidential_compute``. + """ + + enable_confidential_compute: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + + +class DisplayDevice(proto.Message): + r"""A set of Display Device options + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable_display (bool): + Optional. Enables display for the Compute + Engine VM + + This field is a member of `oneof`_ ``_enable_display``. + """ + + enable_display: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + + +class AcceleratorConfig(proto.Message): + r"""A specification of the type and number of accelerator cards + attached to the instance. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + accelerator_type (str): + Optional. Full or partial URL of the + accelerator type resource to attach to this + instance. + + This field is a member of `oneof`_ ``_accelerator_type``. + accelerator_count (int): + Optional. The number of the guest accelerator + cards exposed to this instance. + + This field is a member of `oneof`_ ``_accelerator_count``. + """ + + accelerator_type: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + accelerator_count: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + + +class CustomerEncryptionKey(proto.Message): + r"""A customer-supplied encryption key. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + raw_key (str): + Optional. Specifies a 256-bit + customer-supplied encryption key. + + This field is a member of `oneof`_ ``key``. + rsa_encrypted_key (str): + Optional. RSA-wrapped 2048-bit + customer-supplied encryption key to either + encrypt or decrypt this resource. + + This field is a member of `oneof`_ ``key``. + kms_key_name (str): + Optional. The name of the encryption key that + is stored in Google Cloud KMS. + + This field is a member of `oneof`_ ``key``. + kms_key_service_account (str): + Optional. The service account being used for + the encryption request for the given KMS key. If + absent, the Compute Engine default service + account is used. + + This field is a member of `oneof`_ ``_kms_key_service_account``. 
+ """ + + raw_key: str = proto.Field( + proto.STRING, + number=1, + oneof="key", + ) + rsa_encrypted_key: str = proto.Field( + proto.STRING, + number=2, + oneof="key", + ) + kms_key_name: str = proto.Field( + proto.STRING, + number=3, + oneof="key", + ) + kms_key_service_account: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + + +class Entry(proto.Message): + r"""A key/value pair to be used for storing metadata. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + key (str): + Optional. Key for the metadata entry. + + This field is a member of `oneof`_ ``_key``. + value (str): + Optional. Value for the metadata entry. These + are free-form strings, and only have meaning as + interpreted by the image running in the + instance. The only restriction placed on values + is that their size must be less than or equal to + 262144 bytes (256 KiB). + + This field is a member of `oneof`_ ``_value``. + """ + + key: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + value: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + +class Metadata(proto.Message): + r"""A metadata key/value entry. + + Attributes: + items (MutableSequence[google.cloud.backupdr_v1.types.Entry]): + Optional. Array of key/value pairs. The total + size of all keys and values must be less than + 512 KB. + """ + + items: MutableSequence["Entry"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Entry", + ) + + +class NetworkInterface(proto.Message): + r"""A network interface resource attached to an instance. + s + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network (str): + Optional. URL of the VPC network resource for + this instance. + + This field is a member of `oneof`_ ``_network``. + subnetwork (str): + Optional. The URL of the Subnetwork resource + for this instance. 
+ + This field is a member of `oneof`_ ``_subnetwork``. + ip_address (str): + Optional. An IPv4 internal IP address to + assign to the instance for this network + interface. If not specified by the user, an + unused internal IP is assigned by the system. + + This field is a member of `oneof`_ ``_ip_address``. + ipv6_address (str): + Optional. An IPv6 internal network address + for this network interface. To use a static + internal IP address, it must be unused and in + the same region as the instance's zone. If not + specified, Google Cloud will automatically + assign an internal IPv6 address from the + instance's subnetwork. + + This field is a member of `oneof`_ ``_ipv6_address``. + internal_ipv6_prefix_length (int): + Optional. The prefix length of the primary + internal IPv6 range. + + This field is a member of `oneof`_ ``_internal_ipv6_prefix_length``. + name (str): + Output only. [Output Only] The name of the network + interface, which is generated by the server. + + This field is a member of `oneof`_ ``_name``. + access_configs (MutableSequence[google.cloud.backupdr_v1.types.AccessConfig]): + Optional. An array of configurations for this interface. + Currently, only one access config,ONE_TO_ONE_NAT is + supported. If there are no accessConfigs specified, then + this instance will have no external internet access. + ipv6_access_configs (MutableSequence[google.cloud.backupdr_v1.types.AccessConfig]): + Optional. An array of IPv6 access configurations for this + interface. Currently, only one IPv6 access config, + DIRECT_IPV6, is supported. If there is no ipv6AccessConfig + specified, then this instance will have no external IPv6 + Internet access. + alias_ip_ranges (MutableSequence[google.cloud.backupdr_v1.types.AliasIpRange]): + Optional. An array of alias IP ranges for + this network interface. You can only specify + this field for network interfaces in VPC + networks. 
+ stack_type (google.cloud.backupdr_v1.types.NetworkInterface.StackType): + The stack type for this network interface. + + This field is a member of `oneof`_ ``_stack_type``. + ipv6_access_type (google.cloud.backupdr_v1.types.NetworkInterface.Ipv6AccessType): + Optional. [Output Only] One of EXTERNAL, INTERNAL to + indicate whether the IP can be accessed from the Internet. + This field is always inherited from its subnetwork. + + This field is a member of `oneof`_ ``_ipv6_access_type``. + queue_count (int): + Optional. The networking queue count that's + specified by users for the network interface. + Both Rx and Tx queues will be set to this + number. It'll be empty if not specified by the + users. + + This field is a member of `oneof`_ ``_queue_count``. + nic_type (google.cloud.backupdr_v1.types.NetworkInterface.NicType): + Optional. The type of vNIC to be used on this + interface. This may be gVNIC or VirtioNet. + + This field is a member of `oneof`_ ``_nic_type``. + network_attachment (str): + Optional. The URL of the network attachment that this + interface should connect to in the following format: + projects/{project_number}/regions/{region_name}/networkAttachments/{network_attachment_name}. + + This field is a member of `oneof`_ ``_network_attachment``. + """ + + class StackType(proto.Enum): + r"""Stack type for this network interface. + + Values: + STACK_TYPE_UNSPECIFIED (0): + Default should be STACK_TYPE_UNSPECIFIED. + IPV4_ONLY (1): + The network interface will be assigned IPv4 + address. + IPV4_IPV6 (2): + The network interface can have both IPv4 and + IPv6 addresses. + """ + STACK_TYPE_UNSPECIFIED = 0 + IPV4_ONLY = 1 + IPV4_IPV6 = 2 + + class Ipv6AccessType(proto.Enum): + r"""IPv6 access type for this network interface. + + Values: + UNSPECIFIED_IPV6_ACCESS_TYPE (0): + IPv6 access type not set. Means this network + interface hasn't been turned on IPv6 yet. + INTERNAL (1): + This network interface can have internal + IPv6. 
+ EXTERNAL (2): + This network interface can have external + IPv6. + """ + UNSPECIFIED_IPV6_ACCESS_TYPE = 0 + INTERNAL = 1 + EXTERNAL = 2 + + class NicType(proto.Enum): + r"""Nic type for this network interface. + + Values: + NIC_TYPE_UNSPECIFIED (0): + Default should be NIC_TYPE_UNSPECIFIED. + VIRTIO_NET (1): + VIRTIO + GVNIC (2): + GVNIC + """ + NIC_TYPE_UNSPECIFIED = 0 + VIRTIO_NET = 1 + GVNIC = 2 + + network: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + ip_address: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + ipv6_address: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + internal_ipv6_prefix_length: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=6, + optional=True, + ) + access_configs: MutableSequence["AccessConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message="AccessConfig", + ) + ipv6_access_configs: MutableSequence["AccessConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="AccessConfig", + ) + alias_ip_ranges: MutableSequence["AliasIpRange"] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message="AliasIpRange", + ) + stack_type: StackType = proto.Field( + proto.ENUM, + number=10, + optional=True, + enum=StackType, + ) + ipv6_access_type: Ipv6AccessType = proto.Field( + proto.ENUM, + number=11, + optional=True, + enum=Ipv6AccessType, + ) + queue_count: int = proto.Field( + proto.INT32, + number=12, + optional=True, + ) + nic_type: NicType = proto.Field( + proto.ENUM, + number=13, + optional=True, + enum=NicType, + ) + network_attachment: str = proto.Field( + proto.STRING, + number=14, + optional=True, + ) + + +class NetworkPerformanceConfig(proto.Message): + r"""Network performance configuration. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + total_egress_bandwidth_tier (google.cloud.backupdr_v1.types.NetworkPerformanceConfig.Tier): + Optional. The tier of the total egress + bandwidth. + + This field is a member of `oneof`_ ``_total_egress_bandwidth_tier``. + """ + + class Tier(proto.Enum): + r"""Network performance tier. + + Values: + TIER_UNSPECIFIED (0): + This value is unused. + DEFAULT (1): + Default network performance config. + TIER_1 (2): + Tier 1 network performance config. + """ + TIER_UNSPECIFIED = 0 + DEFAULT = 1 + TIER_1 = 2 + + total_egress_bandwidth_tier: Tier = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=Tier, + ) + + +class AccessConfig(proto.Message): + r"""An access configuration attached to an instance's network + interface. Only one access config per instance is supported. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (google.cloud.backupdr_v1.types.AccessConfig.AccessType): + Optional. In accessConfigs (IPv4), the default and only + option is ONE_TO_ONE_NAT. In ipv6AccessConfigs, the default + and only option is DIRECT_IPV6. + + This field is a member of `oneof`_ ``_type``. + name (str): + Optional. The name of this access + configuration. + + This field is a member of `oneof`_ ``_name``. + external_ip (str): + Optional. The external IP address of this + access configuration. + + This field is a member of `oneof`_ ``_external_ip``. + external_ipv6 (str): + Optional. The external IPv6 address of this + access configuration. + + This field is a member of `oneof`_ ``_external_ipv6``. + external_ipv6_prefix_length (int): + Optional. The prefix length of the external + IPv6 range. + + This field is a member of `oneof`_ ``_external_ipv6_prefix_length``. + set_public_ptr (bool): + Optional. 
Specifies whether a public DNS + 'PTR' record should be created to map the + external IP address of the instance to a DNS + domain name. + + This field is a member of `oneof`_ ``_set_public_ptr``. + public_ptr_domain_name (str): + Optional. The DNS domain name for the public + PTR record. + + This field is a member of `oneof`_ ``_public_ptr_domain_name``. + network_tier (google.cloud.backupdr_v1.types.AccessConfig.NetworkTier): + Optional. This signifies the networking tier + used for configuring this access + + This field is a member of `oneof`_ ``_network_tier``. + """ + + class AccessType(proto.Enum): + r"""The type of configuration. + + Values: + ACCESS_TYPE_UNSPECIFIED (0): + Default value. This value is unused. + ONE_TO_ONE_NAT (1): + ONE_TO_ONE_NAT + DIRECT_IPV6 (2): + Direct IPv6 access. + """ + ACCESS_TYPE_UNSPECIFIED = 0 + ONE_TO_ONE_NAT = 1 + DIRECT_IPV6 = 2 + + class NetworkTier(proto.Enum): + r"""Network tier property used by addresses, instances and + forwarding rules. + + Values: + NETWORK_TIER_UNSPECIFIED (0): + Default value. This value is unused. + PREMIUM (1): + High quality, Google-grade network tier, + support for all networking products. + STANDARD (2): + Public internet quality, only limited support + for other networking products. 
+ """ + NETWORK_TIER_UNSPECIFIED = 0 + PREMIUM = 1 + STANDARD = 2 + + type_: AccessType = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=AccessType, + ) + name: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + external_ip: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + external_ipv6: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + external_ipv6_prefix_length: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + set_public_ptr: bool = proto.Field( + proto.BOOL, + number=6, + optional=True, + ) + public_ptr_domain_name: str = proto.Field( + proto.STRING, + number=7, + optional=True, + ) + network_tier: NetworkTier = proto.Field( + proto.ENUM, + number=8, + optional=True, + enum=NetworkTier, + ) + + +class AliasIpRange(proto.Message): + r"""An alias IP range attached to an instance's network + interface. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ip_cidr_range (str): + Optional. The IP alias ranges to allocate for + this interface. + + This field is a member of `oneof`_ ``_ip_cidr_range``. + subnetwork_range_name (str): + Optional. The name of a subnetwork secondary + IP range from which to allocate an IP alias + range. If not specified, the primary range of + the subnetwork is used. + + This field is a member of `oneof`_ ``_subnetwork_range_name``. + """ + + ip_cidr_range: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + subnetwork_range_name: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + +class InstanceParams(proto.Message): + r"""Additional instance params. + + Attributes: + resource_manager_tags (MutableMapping[str, str]): + Optional. Resource manager tags to be bound + to the instance. 
+ """ + + resource_manager_tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + + +class AllocationAffinity(proto.Message): + r"""Specifies the reservations that this instance can consume + from. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + consume_allocation_type (google.cloud.backupdr_v1.types.AllocationAffinity.Type): + Optional. Specifies the type of reservation + from which this instance can consume + + This field is a member of `oneof`_ ``_consume_allocation_type``. + key (str): + Optional. Corresponds to the label key of a + reservation resource. + + This field is a member of `oneof`_ ``_key``. + values (MutableSequence[str]): + Optional. Corresponds to the label values of + a reservation resource. + """ + + class Type(proto.Enum): + r"""Indicates whether to consume from a reservation or not. + + Values: + TYPE_UNSPECIFIED (0): + Default value. This value is unused. + NO_RESERVATION (1): + Do not consume from any allocated capacity. + ANY_RESERVATION (2): + Consume any allocation available. + SPECIFIC_RESERVATION (3): + Must consume from a specific reservation. + Must specify key value fields for specifying the + reservations. + """ + TYPE_UNSPECIFIED = 0 + NO_RESERVATION = 1 + ANY_RESERVATION = 2 + SPECIFIC_RESERVATION = 3 + + consume_allocation_type: Type = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=Type, + ) + key: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class Scheduling(proto.Message): + r"""Sets the scheduling options for an Instance. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + on_host_maintenance (google.cloud.backupdr_v1.types.Scheduling.OnHostMaintenance): + Optional. 
Defines the maintenance behavior + for this instance. + + This field is a member of `oneof`_ ``_on_host_maintenance``. + automatic_restart (bool): + Optional. Specifies whether the instance + should be automatically restarted if it is + terminated by Compute Engine (not terminated by + a user). + + This field is a member of `oneof`_ ``_automatic_restart``. + preemptible (bool): + Optional. Defines whether the instance is + preemptible. + + This field is a member of `oneof`_ ``_preemptible``. + node_affinities (MutableSequence[google.cloud.backupdr_v1.types.Scheduling.NodeAffinity]): + Optional. A set of node affinity and + anti-affinity configurations. Overrides + reservationAffinity. + min_node_cpus (int): + Optional. The minimum number of virtual CPUs + this instance will consume when running on a + sole-tenant node. + + This field is a member of `oneof`_ ``_min_node_cpus``. + provisioning_model (google.cloud.backupdr_v1.types.Scheduling.ProvisioningModel): + Optional. Specifies the provisioning model of + the instance. + + This field is a member of `oneof`_ ``_provisioning_model``. + instance_termination_action (google.cloud.backupdr_v1.types.Scheduling.InstanceTerminationAction): + Optional. Specifies the termination action + for the instance. + + This field is a member of `oneof`_ ``_instance_termination_action``. + local_ssd_recovery_timeout (google.cloud.backupdr_v1.types.SchedulingDuration): + Optional. Specifies the maximum amount of + time a Local Ssd Vm should wait while recovery + of the Local Ssd state is attempted. Its value + should be in between 0 and 168 hours with hour + granularity and the default value being 1 hour. + + This field is a member of `oneof`_ ``_local_ssd_recovery_timeout``. + """ + + class OnHostMaintenance(proto.Enum): + r"""Defines the maintenance behavior for this instance= + + Values: + ON_HOST_MAINTENANCE_UNSPECIFIED (0): + Default value. This value is unused. 
+ TERMINATE (1): + Tells Compute Engine to terminate and + (optionally) restart the instance away from the + maintenance activity. + MIGRATE (1000): + Default, Allows Compute Engine to + automatically migrate instances out of the way + of maintenance events. + """ + ON_HOST_MAINTENANCE_UNSPECIFIED = 0 + TERMINATE = 1 + MIGRATE = 1000 + + class ProvisioningModel(proto.Enum): + r"""Defines the provisioning model for an instance. + + Values: + PROVISIONING_MODEL_UNSPECIFIED (0): + Default value. This value is not used. + STANDARD (1): + Standard provisioning with user controlled + runtime, no discounts. + SPOT (2): + Heavily discounted, no guaranteed runtime. + """ + PROVISIONING_MODEL_UNSPECIFIED = 0 + STANDARD = 1 + SPOT = 2 + + class InstanceTerminationAction(proto.Enum): + r"""Defines the supported termination actions for an instance. + + Values: + INSTANCE_TERMINATION_ACTION_UNSPECIFIED (0): + Default value. This value is unused. + DELETE (1): + Delete the VM. + STOP (2): + Stop the VM without storing in-memory + content. default action. + """ + INSTANCE_TERMINATION_ACTION_UNSPECIFIED = 0 + DELETE = 1 + STOP = 2 + + class NodeAffinity(proto.Message): + r"""Node Affinity: the configuration of desired nodes onto which + this Instance could be scheduled. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + key (str): + Optional. Corresponds to the label key of + Node resource. + + This field is a member of `oneof`_ ``_key``. + operator (google.cloud.backupdr_v1.types.Scheduling.NodeAffinity.Operator): + Optional. Defines the operation of node + selection. + + This field is a member of `oneof`_ ``_operator``. + values (MutableSequence[str]): + Optional. Corresponds to the label values of + Node resource. + """ + + class Operator(proto.Enum): + r"""Defines the type of node selections. + + Values: + OPERATOR_UNSPECIFIED (0): + Default value. This value is unused. 
+ IN (1): + Requires Compute Engine to seek for matched + nodes. + NOT_IN (2): + Requires Compute Engine to avoid certain + nodes. + """ + OPERATOR_UNSPECIFIED = 0 + IN = 1 + NOT_IN = 2 + + key: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + operator: "Scheduling.NodeAffinity.Operator" = proto.Field( + proto.ENUM, + number=2, + optional=True, + enum="Scheduling.NodeAffinity.Operator", + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + on_host_maintenance: OnHostMaintenance = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=OnHostMaintenance, + ) + automatic_restart: bool = proto.Field( + proto.BOOL, + number=2, + optional=True, + ) + preemptible: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + node_affinities: MutableSequence[NodeAffinity] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=NodeAffinity, + ) + min_node_cpus: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + provisioning_model: ProvisioningModel = proto.Field( + proto.ENUM, + number=6, + optional=True, + enum=ProvisioningModel, + ) + instance_termination_action: InstanceTerminationAction = proto.Field( + proto.ENUM, + number=7, + optional=True, + enum=InstanceTerminationAction, + ) + local_ssd_recovery_timeout: "SchedulingDuration" = proto.Field( + proto.MESSAGE, + number=10, + optional=True, + message="SchedulingDuration", + ) + + +class SchedulingDuration(proto.Message): + r"""A SchedulingDuration represents a fixed-length span of time + represented as a count of seconds and fractions of seconds at + nanosecond resolution. It is independent of any calendar and + concepts like "day" or "month". Range is approximately 10,000 + years. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + seconds (int): + Optional. Span of time at a resolution of a + second. 
+ + This field is a member of `oneof`_ ``_seconds``. + nanos (int): + Optional. Span of time that's a fraction of a + second at nanosecond resolution. + + This field is a member of `oneof`_ ``_nanos``. + """ + + seconds: int = proto.Field( + proto.INT64, + number=1, + optional=True, + ) + nanos: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + + +class ServiceAccount(proto.Message): + r"""A service account. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + email (str): + Optional. Email address of the service + account. + + This field is a member of `oneof`_ ``_email``. + scopes (MutableSequence[str]): + Optional. The list of scopes to be made + available for this service account. + """ + + email: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + scopes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class Tags(proto.Message): + r"""A set of instance tags. + + Attributes: + items (MutableSequence[str]): + Optional. An array of tags. Each tag must be + 1-63 characters long, and comply with RFC1035. + """ + + items: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class AttachedDisk(proto.Message): + r"""An instance-attached disk resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + initialize_params (google.cloud.backupdr_v1.types.AttachedDisk.InitializeParams): + Optional. Specifies the parameters to + initialize this disk. + + This field is a member of `oneof`_ ``_initialize_params``. + device_name (str): + Optional. This is used as an identifier for the disks. This + is the unique name has to provided to modify disk parameters + like disk_name and replica_zones (in case of RePDs) + + This field is a member of `oneof`_ ``_device_name``. + kind (str): + Optional. Type of the resource. 
+ + This field is a member of `oneof`_ ``_kind``. + disk_type_deprecated (google.cloud.backupdr_v1.types.AttachedDisk.DiskType): + Specifies the type of the disk. + + This field is a member of `oneof`_ ``_disk_type_deprecated``. + mode (google.cloud.backupdr_v1.types.AttachedDisk.DiskMode): + Optional. The mode in which to attach this + disk. + + This field is a member of `oneof`_ ``_mode``. + source (str): + Optional. Specifies a valid partial or full + URL to an existing Persistent Disk resource. + + This field is a member of `oneof`_ ``_source``. + index (int): + Optional. A zero-based index to this disk, + where 0 is reserved for the boot disk. + + This field is a member of `oneof`_ ``_index``. + boot (bool): + Optional. Indicates that this is a boot disk. + The virtual machine will use the first partition + of the disk for its root filesystem. + + This field is a member of `oneof`_ ``_boot``. + auto_delete (bool): + Optional. Specifies whether the disk will be + auto-deleted when the instance is deleted (but + not when the disk is detached from the + instance). + + This field is a member of `oneof`_ ``_auto_delete``. + license_ (MutableSequence[str]): + Optional. Any valid publicly visible + licenses. + disk_interface (google.cloud.backupdr_v1.types.AttachedDisk.DiskInterface): + Optional. Specifies the disk interface to use + for attaching this disk. + + This field is a member of `oneof`_ ``_disk_interface``. + guest_os_feature (MutableSequence[google.cloud.backupdr_v1.types.GuestOsFeature]): + Optional. A list of features to enable on the + guest operating system. Applicable only for + bootable images. + disk_encryption_key (google.cloud.backupdr_v1.types.CustomerEncryptionKey): + Optional. Encrypts or decrypts a disk using a + customer-supplied encryption key. + + This field is a member of `oneof`_ ``_disk_encryption_key``. + disk_size_gb (int): + Optional. The size of the disk in GB. + + This field is a member of `oneof`_ ``_disk_size_gb``. 
+ saved_state (google.cloud.backupdr_v1.types.AttachedDisk.DiskSavedState): + Optional. Output only. The state of the disk. + + This field is a member of `oneof`_ ``_saved_state``. + disk_type (str): + Optional. Output only. The URI of the disk + type resource. For example: + projects/project/zones/zone/diskTypes/pd-standard + or pd-ssd + + This field is a member of `oneof`_ ``_disk_type``. + type_ (google.cloud.backupdr_v1.types.AttachedDisk.DiskType): + Optional. Specifies the type of the disk. + + This field is a member of `oneof`_ ``_type``. + """ + + class DiskType(proto.Enum): + r"""List of the Disk Types. + + Values: + DISK_TYPE_UNSPECIFIED (0): + Default value, which is unused. + SCRATCH (1): + A scratch disk type. + PERSISTENT (2): + A persistent disk type. + """ + DISK_TYPE_UNSPECIFIED = 0 + SCRATCH = 1 + PERSISTENT = 2 + + class DiskMode(proto.Enum): + r"""List of the Disk Modes. + + Values: + DISK_MODE_UNSPECIFIED (0): + Default value, which is unused. + READ_WRITE (1): + Attaches this disk in read-write mode. Only + one virtual machine at a time can be attached to + a disk in read-write mode. + READ_ONLY (2): + Attaches this disk in read-only mode. + Multiple virtual machines can use a disk in + read-only mode at a time. + LOCKED (3): + The disk is locked for administrative + reasons. Nobody else can use the disk. This mode + is used (for example) when taking a snapshot of + a disk to prevent mounting the disk while it is + being snapshotted. + """ + DISK_MODE_UNSPECIFIED = 0 + READ_WRITE = 1 + READ_ONLY = 2 + LOCKED = 3 + + class DiskInterface(proto.Enum): + r"""List of the Disk Interfaces. + + Values: + DISK_INTERFACE_UNSPECIFIED (0): + Default value, which is unused. + SCSI (1): + SCSI Disk Interface. + NVME (2): + NVME Disk Interface. + NVDIMM (3): + NVDIMM Disk Interface. + ISCSI (4): + ISCSI Disk Interface. 
+ """ + DISK_INTERFACE_UNSPECIFIED = 0 + SCSI = 1 + NVME = 2 + NVDIMM = 3 + ISCSI = 4 + + class DiskSavedState(proto.Enum): + r"""List of the states of the Disk. + + Values: + DISK_SAVED_STATE_UNSPECIFIED (0): + Default Disk state has not been preserved. + PRESERVED (1): + Disk state has been preserved. + """ + DISK_SAVED_STATE_UNSPECIFIED = 0 + PRESERVED = 1 + + class InitializeParams(proto.Message): + r"""Specifies the parameters to initialize this disk. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk_name (str): + Optional. Specifies the disk name. If not + specified, the default is to use the name of the + instance. + + This field is a member of `oneof`_ ``_disk_name``. + replica_zones (MutableSequence[str]): + Optional. URL of the zone where the disk + should be created. Required for each regional + disk associated with the instance. + """ + + disk_name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + replica_zones: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + initialize_params: InitializeParams = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message=InitializeParams, + ) + device_name: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=5, + optional=True, + ) + disk_type_deprecated: DiskType = proto.Field( + proto.ENUM, + number=6, + optional=True, + enum=DiskType, + ) + mode: DiskMode = proto.Field( + proto.ENUM, + number=7, + optional=True, + enum=DiskMode, + ) + source: str = proto.Field( + proto.STRING, + number=8, + optional=True, + ) + index: int = proto.Field( + proto.INT64, + number=9, + optional=True, + ) + boot: bool = proto.Field( + proto.BOOL, + number=10, + optional=True, + ) + auto_delete: bool = proto.Field( + proto.BOOL, + number=11, + optional=True, + ) + license_: MutableSequence[str] = proto.RepeatedField( + 
proto.STRING, + number=12, + ) + disk_interface: DiskInterface = proto.Field( + proto.ENUM, + number=13, + optional=True, + enum=DiskInterface, + ) + guest_os_feature: MutableSequence["GuestOsFeature"] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message="GuestOsFeature", + ) + disk_encryption_key: "CustomerEncryptionKey" = proto.Field( + proto.MESSAGE, + number=15, + optional=True, + message="CustomerEncryptionKey", + ) + disk_size_gb: int = proto.Field( + proto.INT64, + number=16, + optional=True, + ) + saved_state: DiskSavedState = proto.Field( + proto.ENUM, + number=17, + optional=True, + enum=DiskSavedState, + ) + disk_type: str = proto.Field( + proto.STRING, + number=18, + optional=True, + ) + type_: DiskType = proto.Field( + proto.ENUM, + number=19, + optional=True, + enum=DiskType, + ) + + +class GuestOsFeature(proto.Message): + r"""Feature type of the Guest OS. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (google.cloud.backupdr_v1.types.GuestOsFeature.FeatureType): + The ID of a supported feature. + + This field is a member of `oneof`_ ``_type``. + """ + + class FeatureType(proto.Enum): + r"""List of the Feature Types. + + Values: + FEATURE_TYPE_UNSPECIFIED (0): + Default value, which is unused. + VIRTIO_SCSI_MULTIQUEUE (1): + VIRTIO_SCSI_MULTIQUEUE feature type. + WINDOWS (2): + WINDOWS feature type. + MULTI_IP_SUBNET (3): + MULTI_IP_SUBNET feature type. + UEFI_COMPATIBLE (4): + UEFI_COMPATIBLE feature type. + SECURE_BOOT (5): + SECURE_BOOT feature type. + GVNIC (6): + GVNIC feature type. + SEV_CAPABLE (7): + SEV_CAPABLE feature type. + BARE_METAL_LINUX_COMPATIBLE (8): + BARE_METAL_LINUX_COMPATIBLE feature type. + SUSPEND_RESUME_COMPATIBLE (9): + SUSPEND_RESUME_COMPATIBLE feature type. + SEV_LIVE_MIGRATABLE (10): + SEV_LIVE_MIGRATABLE feature type. + SEV_SNP_CAPABLE (11): + SEV_SNP_CAPABLE feature type. + TDX_CAPABLE (12): + TDX_CAPABLE feature type. 
+ IDPF (13): + IDPF feature type. + SEV_LIVE_MIGRATABLE_V2 (14): + SEV_LIVE_MIGRATABLE_V2 feature type. + """ + FEATURE_TYPE_UNSPECIFIED = 0 + VIRTIO_SCSI_MULTIQUEUE = 1 + WINDOWS = 2 + MULTI_IP_SUBNET = 3 + UEFI_COMPATIBLE = 4 + SECURE_BOOT = 5 + GVNIC = 6 + SEV_CAPABLE = 7 + BARE_METAL_LINUX_COMPATIBLE = 8 + SUSPEND_RESUME_COMPATIBLE = 9 + SEV_LIVE_MIGRATABLE = 10 + SEV_SNP_CAPABLE = 11 + TDX_CAPABLE = 12 + IDPF = 13 + SEV_LIVE_MIGRATABLE_V2 = 14 + + type_: FeatureType = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=FeatureType, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py new file mode 100644 index 000000000000..25dbf9cca081 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_create_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.create_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py new file mode 100644 index 000000000000..fc82ca77f706 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_create_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.create_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py new file mode 100644 index 000000000000..ff546daa2ac6 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupPlan_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_create_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + backup_plan=backup_plan, + ) + + # Make the request + operation = client.create_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the 
response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupPlan_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py new file mode 100644 index 000000000000..5c648a085be1 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupPlan_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_create_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + backup_plan=backup_plan, + ) + + # Make the request + operation = client.create_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupPlan_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py new file mode 100644 index 000000000000..1acf666c5d38 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_create_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Make the request + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupVault_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py new file mode 100644 index 000000000000..5a1abda3a275 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupVault_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_create_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Make the request + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupVault_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py new file mode 100644 index 000000000000..346d24a9543e --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_delete_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py new file mode 100644 index 000000000000..21af239763d3 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_delete_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py new file mode 100644 index 000000000000..18c4ca0cae3b --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_delete_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py new file mode 100644 index 000000000000..3423852e66cd --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupPlan_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_delete_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupPlan_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py new file mode 100644 index 000000000000..aaabe6e55265 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupPlan_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_delete_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupPlan_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py new file mode 100644 index 000000000000..dee368f1cd32 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_delete_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py new file mode 100644 index 000000000000..a70379011f44 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_delete_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupVaultRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupVault_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py new file mode 100644 index 000000000000..2b824ef4088e --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupVault_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_delete_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupVaultRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupVault_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py new file mode 100644 index 000000000000..ab7dc9c365b4 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchUsableBackupVaults +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_fetch_usable_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchUsableBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.fetch_usable_backup_vaults(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py new file mode 100644 index 000000000000..0e4abb2342d5 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchUsableBackupVaults +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_fetch_usable_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchUsableBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.fetch_usable_backup_vaults(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py new file mode 100644 index 000000000000..27f69f503b1b --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py new file mode 100644 index 000000000000..666e503e039c --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_plan_association(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py new file mode 100644 index 000000000000..f16d4b5dcdc6 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_plan_association(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py new file mode 100644 index 000000000000..bbca5985c4d3 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupPlan_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_plan(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupPlan_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py new file mode 100644 index 000000000000..3e6f35ccdc90 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupPlan_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_plan(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupPlan_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py new file mode 100644 index 000000000000..064cbac8920e --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py new file mode 100644 index 000000000000..95d30ed5bf46 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupVaultRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_vault(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupVault_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py new file mode 100644 index 000000000000..814ccccaf4a0 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupVault_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupVaultRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_vault(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupVault_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py new file mode 100644 index 000000000000..864ee90db114 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetDataSource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_data_source(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_source(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetDataSource_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py new file mode 100644 index 000000000000..95f18218de42 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetDataSource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_data_source(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_source(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetDataSource_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py new file mode 100644 index 000000000000..e6cfd3cc039c --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupPlanAssociations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_list_backup_plan_associations(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_associations(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py new file mode 100644 index 000000000000..39b135ce9944 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google 
LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupPlanAssociations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_backup_plan_associations(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_associations(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py new file mode 100644 index 000000000000..f09593b5796b --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupPlans +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupPlans_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_list_backup_plans(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlansRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plans(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupPlans_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py new file mode 100644 index 000000000000..ab2cab9b1701 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupPlans +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupPlans_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_backup_plans(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlansRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plans(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupPlans_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py new file mode 100644 index 000000000000..675c345b810c --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupVaults +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupVaults_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_list_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_vaults(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupVaults_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py new file mode 100644 index 000000000000..27b1faa5debb --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupVaults +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupVaults_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_vaults(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupVaults_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py new file mode 100644 index 000000000000..da6366f3b095 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_list_backups(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackups_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py new file mode 100644 index 000000000000..18387f7371a5 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_backups(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackups_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py new file mode 100644 index 000000000000..f5cb4d5a4477 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataSources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListDataSources_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_list_data_sources(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListDataSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_sources(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListDataSources_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py new file mode 100644 index 000000000000..36680bf32e15 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataSources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListDataSources_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_data_sources(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListDataSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_sources(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListDataSources_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py new file mode 100644 index 000000000000..9bdfab3c21bc --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_RestoreBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_restore_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment() + compute_instance_target_environment.project = "project_value" + compute_instance_target_environment.zone = "zone_value" + + request = backupdr_v1.RestoreBackupRequest( + compute_instance_target_environment=compute_instance_target_environment, + name="name_value", + ) + + # Make the request + operation = client.restore_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_RestoreBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py new file mode 100644 index 
000000000000..6b503fb4a546 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_RestoreBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_restore_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment() + compute_instance_target_environment.project = "project_value" + compute_instance_target_environment.zone = "zone_value" + + request = backupdr_v1.RestoreBackupRequest( + compute_instance_target_environment=compute_instance_target_environment, + name="name_value", + ) + + # Make the request + operation = client.restore_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_RestoreBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py new file mode 100644 index 000000000000..6c6c641d54ee --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TriggerBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_TriggerBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_trigger_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Make the request + operation = client.trigger_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_TriggerBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py new file mode 100644 index 000000000000..359727f2dd1c --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 
2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TriggerBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_TriggerBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_trigger_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Make the request + operation = client.trigger_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_TriggerBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py new file mode 100644 index 000000000000..a1a2fcc0ce51 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_update_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupRequest( + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py new file mode 100644 index 000000000000..9ea7e26404d0 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_update_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupRequest( + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py new file mode 100644 index 000000000000..386f2ca872d3 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_update_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupVaultRequest( + ) + + # Make the request + operation = client.update_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateBackupVault_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py new file mode 100644 index 000000000000..ab3690e1df33 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateBackupVault_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_update_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupVaultRequest( + ) + + # Make the request + operation = client.update_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateBackupVault_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py new file mode 100644 index 000000000000..986de214c53d --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateDataSource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_update_data_source(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateDataSourceRequest( + ) + + # Make the request + operation = client.update_data_source(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateDataSource_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py new file mode 100644 index 000000000000..d20aa5d93848 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateDataSource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_update_data_source(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateDataSourceRequest( + ) + + # Make the request + operation = client.update_data_source(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateDataSource_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json index ff879435143f..904b6f7dbef2 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json +++ b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json @@ -11,6 +11,537 @@ "version": "0.1.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_plan_association", + "type": 
"google.cloud.backupdr_v1.types.BackupPlanAssociation" + }, + { + "name": "backup_plan_association_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_backup_plan_association" + }, + "description": "Sample for CreateBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_plan_association", + "type": "google.cloud.backupdr_v1.types.BackupPlanAssociation" + }, + { + "name": 
"backup_plan_association_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_backup_plan_association" + }, + "description": "Sample for CreateBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupPlanRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_plan", + "type": "google.cloud.backupdr_v1.types.BackupPlan" + }, + { + "name": "backup_plan_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_backup_plan" + }, + "description": "Sample for CreateBackupPlan", + "file": "backupdr_v1_generated_backup_dr_create_backup_plan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlan_async", + "segments": [ + { + "end": 67, + "start": 27, + "type": "FULL" + }, + { + "end": 67, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 57, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 64, + "start": 58, + "type": "REQUEST_EXECUTION" + }, + { + "end": 68, + "start": 65, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_plan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupPlanRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_plan", + "type": "google.cloud.backupdr_v1.types.BackupPlan" + }, + { + "name": "backup_plan_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_backup_plan" + }, + "description": "Sample for 
CreateBackupPlan", + "file": "backupdr_v1_generated_backup_dr_create_backup_plan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlan_sync", + "segments": [ + { + "end": 67, + "start": 27, + "type": "FULL" + }, + { + "end": 67, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 57, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 64, + "start": 58, + "type": "REQUEST_EXECUTION" + }, + { + "end": 68, + "start": 65, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_plan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupVaultRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_vault", + "type": "google.cloud.backupdr_v1.types.BackupVault" + }, + { + "name": "backup_vault_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_backup_vault" + }, + "description": "Sample for CreateBackupVault", + "file": "backupdr_v1_generated_backup_dr_create_backup_vault_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"backupdr_v1_generated_BackupDR_CreateBackupVault_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_vault_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupVaultRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_vault", + "type": "google.cloud.backupdr_v1.types.BackupVault" + }, + { + "name": "backup_vault_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_backup_vault" + }, + "description": "Sample for CreateBackupVault", + "file": "backupdr_v1_generated_backup_dr_create_backup_vault_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupVault_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_vault_sync.py" + }, { "canonical": true, "clientMethod": { @@ -21,28 +552,3260 @@ }, "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_management_server", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.CreateManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateManagementServerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "management_server", + "type": "google.cloud.backupdr_v1.types.ManagementServer" + }, + { + "name": "management_server_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_management_server" + }, + "description": "Sample for CreateManagementServer", + "file": "backupdr_v1_generated_backup_dr_create_management_server_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateManagementServer_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { 
+ "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_management_server_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateManagementServerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "management_server", + "type": "google.cloud.backupdr_v1.types.ManagementServer" + }, + { + "name": "management_server_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_management_server" + }, + "description": "Sample for CreateManagementServer", + "file": "backupdr_v1_generated_backup_dr_create_management_server_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateManagementServer_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_management_server_sync.py" + }, + { + 
"canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup_plan_association" + }, + "description": "Sample for DeleteBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": 
"google.cloud.backupdr_v1.BackupDRClient.delete_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup_plan_association" + }, + "description": "Sample for DeleteBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + 
"shortName": "DeleteBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup_plan" + }, + "description": "Sample for DeleteBackupPlan", + "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlan_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_plan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup_plan" + }, + "description": "Sample for DeleteBackupPlan", + "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlan_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupVaultRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup_vault" + }, + "description": "Sample for DeleteBackupVault", + "file": "backupdr_v1_generated_backup_dr_delete_backup_vault_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"backupdr_v1_generated_BackupDR_DeleteBackupVault_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_vault_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupVaultRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup_vault" + }, + "description": "Sample for DeleteBackupVault", + "file": "backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupVault_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "backupdr_v1_generated_backup_dr_delete_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackup_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": 
"google.cloud.backupdr_v1.BackupDRClient.delete_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "backupdr_v1_generated_backup_dr_delete_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackup_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.backupdr_v1.types.DeleteManagementServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_management_server" + }, + "description": "Sample for DeleteManagementServer", + "file": "backupdr_v1_generated_backup_dr_delete_management_server_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteManagementServer_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_management_server_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteManagementServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation.Operation", + "shortName": "delete_management_server" + }, + "description": "Sample for DeleteManagementServer", + "file": "backupdr_v1_generated_backup_dr_delete_management_server_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteManagementServer_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_management_server_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.fetch_usable_backup_vaults", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.FetchUsableBackupVaults", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "FetchUsableBackupVaults" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsAsyncPager", + "shortName": "fetch_usable_backup_vaults" + }, + "description": "Sample for FetchUsableBackupVaults", + "file": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.fetch_usable_backup_vaults", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.FetchUsableBackupVaults", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "FetchUsableBackupVaults" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsPager", + "shortName": "fetch_usable_backup_vaults" + }, + "description": "Sample for FetchUsableBackupVaults", + "file": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupPlanAssociation", + "shortName": "get_backup_plan_association" + }, + "description": "Sample for GetBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } 
+ ], + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupPlanAssociation", + "shortName": "get_backup_plan_association" + }, + "description": "Sample for GetBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": 
"google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupPlanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupPlan", + "shortName": "get_backup_plan" + }, + "description": "Sample for GetBackupPlan", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlan_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupPlanRequest" + }, + { + 
"name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupPlan", + "shortName": "get_backup_plan" + }, + "description": "Sample for GetBackupPlan", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlan_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupVaultRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupVault", + "shortName": "get_backup_vault" + }, + "description": "Sample for 
GetBackupVault", + "file": "backupdr_v1_generated_backup_dr_get_backup_vault_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupVault_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_vault_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupVaultRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupVault", + "shortName": "get_backup_vault" + }, + "description": "Sample for GetBackupVault", + "file": "backupdr_v1_generated_backup_dr_get_backup_vault_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupVault_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_vault_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "backupdr_v1_generated_backup_dr_get_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "backupdr_v1_generated_backup_dr_get_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_data_source", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetDataSource", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetDataSource" + }, + "parameters": [ + { + "name": "request", + 
"type": "google.cloud.backupdr_v1.types.GetDataSourceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.DataSource", + "shortName": "get_data_source" + }, + "description": "Sample for GetDataSource", + "file": "backupdr_v1_generated_backup_dr_get_data_source_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetDataSource_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_data_source_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_data_source", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetDataSource", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetDataSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetDataSourceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.DataSource", + "shortName": "get_data_source" + }, + 
"description": "Sample for GetDataSource", + "file": "backupdr_v1_generated_backup_dr_get_data_source_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetDataSource_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_data_source_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.ManagementServer", + "shortName": "get_management_server" + }, + "description": "Sample for GetManagementServer", + "file": "backupdr_v1_generated_backup_dr_get_management_server_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + 
"end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_management_server_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.ManagementServer", + "shortName": "get_management_server" + }, + "description": "Sample for GetManagementServer", + "file": "backupdr_v1_generated_backup_dr_get_management_server_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"backupdr_v1_generated_backup_dr_get_management_server_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_plan_associations", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlanAssociations", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupPlanAssociations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsAsyncPager", + "shortName": "list_backup_plan_associations" + }, + "description": "Sample for ListBackupPlanAssociations", + "file": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": 
"BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_plan_associations", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlanAssociations", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupPlanAssociations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsPager", + "shortName": "list_backup_plan_associations" + }, + "description": "Sample for ListBackupPlanAssociations", + "file": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_plans", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlans", + "service": { + "fullName": 
"google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupPlans" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupPlansRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansAsyncPager", + "shortName": "list_backup_plans" + }, + "description": "Sample for ListBackupPlans", + "file": "backupdr_v1_generated_backup_dr_list_backup_plans_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlans_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_plans_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_plans", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlans", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupPlans" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupPlansRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansPager", + "shortName": "list_backup_plans" + }, + "description": "Sample for ListBackupPlans", + "file": "backupdr_v1_generated_backup_dr_list_backup_plans_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlans_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_plans_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_vaults", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupVaults", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupVaults" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupVaultsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsAsyncPager", + "shortName": "list_backup_vaults" + }, + "description": "Sample for ListBackupVaults", + "file": 
"backupdr_v1_generated_backup_dr_list_backup_vaults_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupVaults_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_vaults_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_vaults", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupVaults", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupVaults" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupVaultsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsPager", + "shortName": "list_backup_vaults" + }, + "description": "Sample for ListBackupVaults", + "file": "backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupVaults_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 
38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backups", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackups", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsAsyncPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "backupdr_v1_generated_backup_dr_list_backups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backups_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backups", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackups", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "backupdr_v1_generated_backup_dr_list_backups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_data_sources", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListDataSources", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": 
"BackupDR" + }, + "shortName": "ListDataSources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListDataSourcesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesAsyncPager", + "shortName": "list_data_sources" + }, + "description": "Sample for ListDataSources", + "file": "backupdr_v1_generated_backup_dr_list_data_sources_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListDataSources_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_data_sources_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_data_sources", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListDataSources", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListDataSources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListDataSourcesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesPager", + "shortName": "list_data_sources" + }, + "description": "Sample for ListDataSources", + "file": "backupdr_v1_generated_backup_dr_list_data_sources_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListDataSources_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_data_sources_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_management_servers", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListManagementServers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersAsyncPager", + "shortName": "list_management_servers" + }, + "description": "Sample for ListManagementServers", + "file": 
"backupdr_v1_generated_backup_dr_list_management_servers_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_management_servers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_management_servers", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListManagementServers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersPager", + "shortName": "list_management_servers" + }, + "description": "Sample for ListManagementServers", + "file": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + 
"start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.restore_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.RestoreBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "RestoreBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.RestoreBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restore_backup" + }, + "description": "Sample for RestoreBackup", + "file": "backupdr_v1_generated_backup_dr_restore_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_RestoreBackup_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"backupdr_v1_generated_backup_dr_restore_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.restore_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.RestoreBackup", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "CreateManagementServer" + "shortName": "RestoreBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.CreateManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.RestoreBackupRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { - "name": "management_server", - "type": "google.cloud.backupdr_v1.types.ManagementServer" + "name": "retry", + "type": "google.api_core.retry.Retry" }, { - "name": "management_server_id", + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restore_backup" + }, + "description": "Sample for RestoreBackup", + "file": "backupdr_v1_generated_backup_dr_restore_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_RestoreBackup_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_restore_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.trigger_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.TriggerBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "TriggerBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.TriggerBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "rule_id", "type": "str" }, { @@ -59,13 +3822,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_management_server" + "shortName": "trigger_backup" }, - "description": "Sample for CreateManagementServer", - "file": "backupdr_v1_generated_backup_dr_create_management_server_async.py", + "description": "Sample for TriggerBackup", + "file": "backupdr_v1_generated_backup_dr_trigger_backup_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_CreateManagementServer_async", + "regionTag": "backupdr_v1_generated_BackupDR_TriggerBackup_async", "segments": [ { "end": 56, @@ -98,7 +3861,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_create_management_server_async.py" + "title": "backupdr_v1_generated_backup_dr_trigger_backup_async.py" }, { "canonical": true, @@ -107,30 +3870,26 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.trigger_backup", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.CreateManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.TriggerBackup", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "CreateManagementServer" + 
"shortName": "TriggerBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.CreateManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.TriggerBackupRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { - "name": "management_server", - "type": "google.cloud.backupdr_v1.types.ManagementServer" - }, - { - "name": "management_server_id", + "name": "rule_id", "type": "str" }, { @@ -147,13 +3906,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_management_server" + "shortName": "trigger_backup" }, - "description": "Sample for CreateManagementServer", - "file": "backupdr_v1_generated_backup_dr_create_management_server_sync.py", + "description": "Sample for TriggerBackup", + "file": "backupdr_v1_generated_backup_dr_trigger_backup_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_CreateManagementServer_sync", + "regionTag": "backupdr_v1_generated_BackupDR_TriggerBackup_sync", "segments": [ { "end": 56, @@ -186,7 +3945,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_create_management_server_sync.py" + "title": "backupdr_v1_generated_backup_dr_trigger_backup_sync.py" }, { "canonical": true, @@ -196,23 +3955,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.update_backup_vault", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackupVault", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "DeleteManagementServer" + "shortName": "UpdateBackupVault" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.backupdr_v1.types.DeleteManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.UpdateBackupVaultRequest" }, { - "name": "name", - "type": "str" + "name": "backup_vault", + "type": "google.cloud.backupdr_v1.types.BackupVault" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -228,21 +3991,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_management_server" + "shortName": "update_backup_vault" }, - "description": "Sample for DeleteManagementServer", - "file": "backupdr_v1_generated_backup_dr_delete_management_server_async.py", + "description": "Sample for UpdateBackupVault", + "file": "backupdr_v1_generated_backup_dr_update_backup_vault_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_DeleteManagementServer_async", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackupVault_async", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -252,22 +4015,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_delete_management_server_async.py" + "title": "backupdr_v1_generated_backup_dr_update_backup_vault_async.py" }, { "canonical": true, @@ -276,23 +4039,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.update_backup_vault", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteManagementServer", + "fullName": 
"google.cloud.backupdr.v1.BackupDR.UpdateBackupVault", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "DeleteManagementServer" + "shortName": "UpdateBackupVault" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.DeleteManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.UpdateBackupVaultRequest" }, { - "name": "name", - "type": "str" + "name": "backup_vault", + "type": "google.cloud.backupdr_v1.types.BackupVault" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -308,21 +4075,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_management_server" + "shortName": "update_backup_vault" }, - "description": "Sample for DeleteManagementServer", - "file": "backupdr_v1_generated_backup_dr_delete_management_server_sync.py", + "description": "Sample for UpdateBackupVault", + "file": "backupdr_v1_generated_backup_dr_update_backup_vault_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_DeleteManagementServer_sync", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackupVault_sync", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -332,22 +4099,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_delete_management_server_sync.py" + "title": "backupdr_v1_generated_backup_dr_update_backup_vault_sync.py" }, { "canonical": true, @@ -357,23 +4124,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - 
"fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.update_backup", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackup", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetManagementServer" + "shortName": "UpdateBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.UpdateBackupRequest" }, { - "name": "name", - "type": "str" + "name": "backup", + "type": "google.cloud.backupdr_v1.types.Backup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -388,22 +4159,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.backupdr_v1.types.ManagementServer", - "shortName": "get_management_server" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_backup" }, - "description": "Sample for GetManagementServer", - "file": "backupdr_v1_generated_backup_dr_get_management_server_async.py", + "description": "Sample for UpdateBackup", + "file": "backupdr_v1_generated_backup_dr_update_backup_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_async", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackup_async", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -413,22 +4184,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } 
], - "title": "backupdr_v1_generated_backup_dr_get_management_server_async.py" + "title": "backupdr_v1_generated_backup_dr_update_backup_async.py" }, { "canonical": true, @@ -437,23 +4208,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.update_backup", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackup", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetManagementServer" + "shortName": "UpdateBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.UpdateBackupRequest" }, { - "name": "name", - "type": "str" + "name": "backup", + "type": "google.cloud.backupdr_v1.types.Backup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -468,22 +4243,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.backupdr_v1.types.ManagementServer", - "shortName": "get_management_server" + "resultType": "google.api_core.operation.Operation", + "shortName": "update_backup" }, - "description": "Sample for GetManagementServer", - "file": "backupdr_v1_generated_backup_dr_get_management_server_sync.py", + "description": "Sample for UpdateBackup", + "file": "backupdr_v1_generated_backup_dr_update_backup_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_sync", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackup_sync", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -493,22 +4268,22 @@ "type": "CLIENT_INITIALIZATION" 
}, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_get_management_server_sync.py" + "title": "backupdr_v1_generated_backup_dr_update_backup_sync.py" }, { "canonical": true, @@ -518,23 +4293,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_management_servers", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.update_data_source", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateDataSource", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListManagementServers" + "shortName": "UpdateDataSource" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + "type": "google.cloud.backupdr_v1.types.UpdateDataSourceRequest" }, { - "name": "parent", - "type": "str" + "name": "data_source", + "type": "google.cloud.backupdr_v1.types.DataSource" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -549,22 +4328,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersAsyncPager", - "shortName": "list_management_servers" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_data_source" }, - "description": "Sample for ListManagementServers", - "file": "backupdr_v1_generated_backup_dr_list_management_servers_async.py", + "description": "Sample for UpdateDataSource", + "file": "backupdr_v1_generated_backup_dr_update_data_source_async.py", 
"language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_async", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateDataSource_async", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -574,22 +4353,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_management_servers_async.py" + "title": "backupdr_v1_generated_backup_dr_update_data_source_async.py" }, { "canonical": true, @@ -598,23 +4377,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_management_servers", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.update_data_source", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateDataSource", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListManagementServers" + "shortName": "UpdateDataSource" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + "type": "google.cloud.backupdr_v1.types.UpdateDataSourceRequest" }, { - "name": "parent", - "type": "str" + "name": "data_source", + "type": "google.cloud.backupdr_v1.types.DataSource" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -629,22 +4412,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersPager", - "shortName": 
"list_management_servers" + "resultType": "google.api_core.operation.Operation", + "shortName": "update_data_source" }, - "description": "Sample for ListManagementServers", - "file": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py", + "description": "Sample for UpdateDataSource", + "file": "backupdr_v1_generated_backup_dr_update_data_source_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_sync", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateDataSource_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -654,22 +4437,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py" + "title": "backupdr_v1_generated_backup_dr_update_data_source_sync.py" } ] } diff --git a/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py b/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py index b65698148046..c0dd15568f46 100644 --- a/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py +++ b/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py @@ -39,10 +39,33 @@ def partition( class backupdrCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_backup_plan': ('parent', 'backup_plan_id', 'backup_plan', 'request_id', ), + 'create_backup_plan_association': ('parent', 'backup_plan_association_id', 'backup_plan_association', 'request_id', ), + 'create_backup_vault': ('parent', 'backup_vault_id', 'backup_vault', 'request_id', 
'validate_only', ), 'create_management_server': ('parent', 'management_server_id', 'management_server', 'request_id', ), + 'delete_backup': ('name', 'request_id', ), + 'delete_backup_plan': ('name', 'request_id', ), + 'delete_backup_plan_association': ('name', 'request_id', ), + 'delete_backup_vault': ('name', 'request_id', 'force', 'etag', 'validate_only', 'allow_missing', ), 'delete_management_server': ('name', 'request_id', ), + 'fetch_usable_backup_vaults': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'get_backup': ('name', 'view', ), + 'get_backup_plan': ('name', ), + 'get_backup_plan_association': ('name', ), + 'get_backup_vault': ('name', 'view', ), + 'get_data_source': ('name', ), 'get_management_server': ('name', ), + 'list_backup_plan_associations': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_backup_plans': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_backups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'view', ), + 'list_backup_vaults': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'view', ), + 'list_data_sources': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_management_servers': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'restore_backup': ('name', 'request_id', 'compute_instance_target_environment', 'compute_instance_restore_properties', ), + 'trigger_backup': ('name', 'rule_id', 'request_id', ), + 'update_backup': ('update_mask', 'backup', 'request_id', ), + 'update_backup_vault': ('update_mask', 'backup_vault', 'request_id', 'validate_only', 'force', ), + 'update_data_source': ('update_mask', 'data_source', 'request_id', 'allow_missing', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py index 
1b1c7ad74e91..ee098d5a5646 100644 --- a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py +++ b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py @@ -48,10 +48,16 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import month_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -66,7 +72,14 @@ pagers, transports, ) -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, + backupvault_ba, + backupvault_gce, +) def client_cert_source_callback(): @@ -2911,52 +2924,98 @@ async def test_delete_management_server_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - backupdr.ListManagementServersRequest, + backupvault.CreateBackupVaultRequest, dict, ], ) -def test_list_management_servers_rest(request_type): +def test_create_backup_vault(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out 
the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = backupdr.ListManagementServersResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.CreateBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
) + client.create_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.CreateBackupVaultRequest() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupdr.ListManagementServersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_management_servers(request) +def test_create_backup_vault_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListManagementServersPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_backup_vault(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) -def test_list_management_servers_rest_use_cached_wrapped_rpc(): +def test_create_backup_vault_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -2965,8 +3024,7 @@ def test_list_management_servers_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_management_servers - in client._transport._wrapped_methods + client._transport.create_backup_vault in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -2975,128 +3033,18962 @@ def test_list_management_servers_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_management_servers + client._transport.create_backup_vault ] = mock_rpc - request = {} - client.list_management_servers(request) + client.create_backup_vault(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_management_servers(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_vault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_management_servers_rest_required_fields( - request_type=backupdr.ListManagementServersRequest, -): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_create_backup_vault_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.CreateBackupVaultRequest() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_management_servers._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with default values are now present +@pytest.mark.asyncio +async def test_create_backup_vault_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - jsonified_request["parent"] = "parent_value" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_management_servers._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", + # Ensure method has been cached + assert ( + client._client._transport.create_backup_vault + in client._client._transport._wrapped_methods ) - ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_backup_vault + ] = mock_rpc - client = BackupDRClient( + request = {} + await client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_vault_async( + transport: str = "grpc_asyncio", request_type=backupvault.CreateBackupVaultRequest +): + client = BackupDRAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = backupdr.ListManagementServersResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_vault(request) - # Convert return value to protobuf type - return_value = backupdr.ListManagementServersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.CreateBackupVaultRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) - response = client.list_management_servers(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_create_backup_vault_async_from_dict(): + await test_create_backup_vault_async(request_type=dict) -def test_list_management_servers_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_create_backup_vault_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.list_management_servers._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.CreateBackupVaultRequest() + request.parent = "parent_value" -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_management_servers_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_backup_vault_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.CreateBackupVaultRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_backup_vault_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup_vault( + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_vault + mock_val = backupvault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].backup_vault_id + mock_val = "backup_vault_id_value" + assert arg == mock_val + + +def test_create_backup_vault_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup_vault( + backupvault.CreateBackupVaultRequest(), + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_backup_vault_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_backup_vault( + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_vault + mock_val = backupvault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].backup_vault_id + mock_val = "backup_vault_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_backup_vault_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_backup_vault( + backupvault.CreateBackupVaultRequest(), + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListBackupVaultsRequest, + dict, + ], +) +def test_list_backup_vaults(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.ListBackupVaultsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_vaults_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_vaults() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupVaultsRequest() + + +def test_list_backup_vaults_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.ListBackupVaultsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_vaults(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupVaultsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_backup_vaults_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_backup_vaults in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_vaults + ] = mock_rpc + request = {} + client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_vaults_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_vaults() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupVaultsRequest() + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_backup_vaults + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_backup_vaults + ] = mock_rpc + + request = {} + await client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async( + transport: str = "grpc_asyncio", request_type=backupvault.ListBackupVaultsRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.ListBackupVaultsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupVaultsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async_from_dict(): + await test_list_backup_vaults_async(request_type=dict) + + +def test_list_backup_vaults_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backupvault.ListBackupVaultsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + call.return_value = backupvault.ListBackupVaultsResponse() + client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backup_vaults_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.ListBackupVaultsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupVaultsResponse() + ) + await client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backup_vaults_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupVaultsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backup_vaults( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backup_vaults_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_vaults( + backupvault.ListBackupVaultsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backup_vaults_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupVaultsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupVaultsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backup_vaults( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backup_vaults_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backup_vaults( + backupvault.ListBackupVaultsRequest(), + parent="parent_value", + ) + + +def test_list_backup_vaults_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_vaults(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in 
results) + + +def test_list_backup_vaults_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_vaults(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_vaults( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_vaults(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.FetchUsableBackupVaultsRequest, + dict, + ], +) +def test_fetch_usable_backup_vaults(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.FetchUsableBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.FetchUsableBackupVaultsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchUsableBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_fetch_usable_backup_vaults_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_usable_backup_vaults() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.FetchUsableBackupVaultsRequest() + + +def test_fetch_usable_backup_vaults_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.FetchUsableBackupVaultsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_usable_backup_vaults(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.FetchUsableBackupVaultsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_fetch_usable_backup_vaults_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.fetch_usable_backup_vaults + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_usable_backup_vaults + ] = mock_rpc + request = {} + client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.fetch_usable_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchUsableBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.fetch_usable_backup_vaults() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.FetchUsableBackupVaultsRequest() + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.fetch_usable_backup_vaults + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.fetch_usable_backup_vaults + ] = mock_rpc + + request = {} + await client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.fetch_usable_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async( + transport: str = "grpc_asyncio", + request_type=backupvault.FetchUsableBackupVaultsRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchUsableBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.FetchUsableBackupVaultsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchUsableBackupVaultsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async_from_dict(): + await test_fetch_usable_backup_vaults_async(request_type=dict) + + +def test_fetch_usable_backup_vaults_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.FetchUsableBackupVaultsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + call.return_value = backupvault.FetchUsableBackupVaultsResponse() + client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.FetchUsableBackupVaultsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchUsableBackupVaultsResponse() + ) + await client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_fetch_usable_backup_vaults_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.FetchUsableBackupVaultsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_usable_backup_vaults( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_fetch_usable_backup_vaults_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.fetch_usable_backup_vaults( + backupvault.FetchUsableBackupVaultsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.FetchUsableBackupVaultsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchUsableBackupVaultsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_usable_backup_vaults( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.fetch_usable_backup_vaults( + backupvault.FetchUsableBackupVaultsRequest(), + parent="parent_value", + ) + + +def test_fetch_usable_backup_vaults_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.fetch_usable_backup_vaults( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in results) + + +def test_fetch_usable_backup_vaults_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + pages = list(client.fetch_usable_backup_vaults(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + async_pager = await client.fetch_usable_backup_vaults( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in responses) + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.fetch_usable_backup_vaults(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetBackupVaultRequest, + dict, + ], +) +def test_get_backup_vault(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupvault.BackupVault( + name="name_value", + description="description_value", + deletable=True, + etag="etag_value", + state=backupvault.BackupVault.State.CREATING, + backup_count=1278, + service_account="service_account_value", + total_stored_bytes=1946, + uid="uid_value", + access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, + ) + response = client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.GetBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupvault.BackupVault) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.deletable is True + assert response.etag == "etag_value" + assert response.state == backupvault.BackupVault.State.CREATING + assert response.backup_count == 1278 + assert response.service_account == "service_account_value" + assert response.total_stored_bytes == 1946 + assert response.uid == "uid_value" + assert ( + response.access_restriction + == backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT + ) + + +def test_get_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupVaultRequest() + + +def test_get_backup_vault_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.GetBackupVaultRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup_vault(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupVaultRequest( + name="name_value", + ) + + +def test_get_backup_vault_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup_vault in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_backup_vault + ] = mock_rpc + request = {} + client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_vault_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.BackupVault( + name="name_value", + description="description_value", + deletable=True, + etag="etag_value", + state=backupvault.BackupVault.State.CREATING, + backup_count=1278, + service_account="service_account_value", + total_stored_bytes=1946, + uid="uid_value", + access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, + ) + ) + response = await client.get_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupVaultRequest() + + +@pytest.mark.asyncio +async def test_get_backup_vault_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) 
+ + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backup_vault + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backup_vault + ] = mock_rpc + + request = {} + await client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_vault_async( + transport: str = "grpc_asyncio", request_type=backupvault.GetBackupVaultRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.BackupVault( + name="name_value", + description="description_value", + deletable=True, + etag="etag_value", + state=backupvault.BackupVault.State.CREATING, + backup_count=1278, + service_account="service_account_value", + total_stored_bytes=1946, + uid="uid_value", + access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, + ) + ) + response = await client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.GetBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupvault.BackupVault) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.deletable is True + assert response.etag == "etag_value" + assert response.state == backupvault.BackupVault.State.CREATING + assert response.backup_count == 1278 + assert response.service_account == "service_account_value" + assert response.total_stored_bytes == 1946 + assert response.uid == "uid_value" + assert ( + response.access_restriction + == backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT + ) + + +@pytest.mark.asyncio +async def test_get_backup_vault_async_from_dict(): + await test_get_backup_vault_async(request_type=dict) + + +def test_get_backup_vault_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + call.return_value = backupvault.BackupVault() + client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_vault_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.BackupVault() + ) + await client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_vault_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.BackupVault() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_vault_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_vault( + backupvault.GetBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_vault_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.BackupVault() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.BackupVault() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_vault_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_backup_vault( + backupvault.GetBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateBackupVaultRequest, + dict, + ], +) +def test_update_backup_vault(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupVaultRequest() + + +def test_update_backup_vault_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.UpdateBackupVaultRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_backup_vault(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupVaultRequest() + + +def test_update_backup_vault_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_backup_vault in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.update_backup_vault + ] = mock_rpc + request = {} + client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_backup_vault_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupVaultRequest() + + +@pytest.mark.asyncio +async def test_update_backup_vault_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_backup_vault + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_backup_vault + ] = mock_rpc + + request = {} + await client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_backup_vault_async( + transport: str = "grpc_asyncio", request_type=backupvault.UpdateBackupVaultRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_backup_vault_async_from_dict(): + await test_update_backup_vault_async(request_type=dict) + + +def test_update_backup_vault_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateBackupVaultRequest() + + request.backup_vault.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup_vault.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_backup_vault_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateBackupVaultRequest() + + request.backup_vault.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup_vault.name=name_value", + ) in kw["metadata"] + + +def test_update_backup_vault_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_backup_vault( + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].backup_vault + mock_val = backupvault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_backup_vault_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup_vault( + backupvault.UpdateBackupVaultRequest(), + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_backup_vault_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_backup_vault( + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].backup_vault + mock_val = backupvault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_backup_vault_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_backup_vault( + backupvault.UpdateBackupVaultRequest(), + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.DeleteBackupVaultRequest, + dict, + ], +) +def test_delete_backup_vault(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.DeleteBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupVaultRequest() + + +def test_delete_backup_vault_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.DeleteBackupVaultRequest( + name="name_value", + etag="etag_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_backup_vault(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupVaultRequest( + name="name_value", + etag="etag_value", + ) + + +def test_delete_backup_vault_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_vault in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backup_vault + ] = mock_rpc + request = {} + client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_vault_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupVaultRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_vault_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_backup_vault + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_backup_vault + ] = mock_rpc + + request = {} + await client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_vault_async( + transport: str = "grpc_asyncio", request_type=backupvault.DeleteBackupVaultRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.DeleteBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_vault_async_from_dict(): + await test_delete_backup_vault_async(request_type=dict) + + +def test_delete_backup_vault_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.DeleteBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_vault_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.DeleteBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_vault_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_vault_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup_vault( + backupvault.DeleteBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_vault_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_vault_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backup_vault( + backupvault.DeleteBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListDataSourcesRequest, + dict, + ], +) +def test_list_data_sources(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListDataSourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.ListDataSourcesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataSourcesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_data_sources_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_data_sources() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListDataSourcesRequest() + + +def test_list_data_sources_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.ListDataSourcesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_data_sources(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListDataSourcesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_data_sources_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_sources in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_data_sources + ] = mock_rpc + request = {} + client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_data_sources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_data_sources_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListDataSourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_data_sources() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListDataSourcesRequest() + + +@pytest.mark.asyncio +async def test_list_data_sources_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_data_sources + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_data_sources + ] = mock_rpc + + request = {} + await client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_data_sources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_data_sources_async( + transport: str = "grpc_asyncio", request_type=backupvault.ListDataSourcesRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListDataSourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.ListDataSourcesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataSourcesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_data_sources_async_from_dict(): + await test_list_data_sources_async(request_type=dict) + + +def test_list_data_sources_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backupvault.ListDataSourcesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value = backupvault.ListDataSourcesResponse() + client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_data_sources_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.ListDataSourcesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListDataSourcesResponse() + ) + await client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_data_sources_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListDataSourcesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_data_sources( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_data_sources_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_sources( + backupvault.ListDataSourcesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_data_sources_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListDataSourcesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListDataSourcesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_data_sources( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_data_sources_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_data_sources( + backupvault.ListDataSourcesRequest(), + parent="parent_value", + ) + + +def test_list_data_sources_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_data_sources(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.DataSource) for i in results) + + +def 
test_list_data_sources_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + RuntimeError, + ) + pages = list(client.list_data_sources(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_data_sources_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_sources( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupvault.DataSource) for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_sources_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_data_sources(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetDataSourceRequest, + dict, + ], +) +def test_get_data_source(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.DataSource( + name="name_value", + state=backupvault.DataSource.State.CREATING, + backup_count=1278, + etag="etag_value", + total_stored_bytes=1946, + config_state=backupvault.BackupConfigState.ACTIVE, + ) + response = client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.GetDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupvault.DataSource) + assert response.name == "name_value" + assert response.state == backupvault.DataSource.State.CREATING + assert response.backup_count == 1278 + assert response.etag == "etag_value" + assert response.total_stored_bytes == 1946 + assert response.config_state == backupvault.BackupConfigState.ACTIVE + + +def test_get_data_source_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetDataSourceRequest() + + +def test_get_data_source_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.GetDataSourceRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_data_source(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetDataSourceRequest( + name="name_value", + ) + + +def test_get_data_source_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_data_source] = mock_rpc + request = {} + client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_data_source_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.DataSource( + name="name_value", + state=backupvault.DataSource.State.CREATING, + backup_count=1278, + etag="etag_value", + total_stored_bytes=1946, + config_state=backupvault.BackupConfigState.ACTIVE, + ) + ) + response = await client.get_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetDataSourceRequest() + + +@pytest.mark.asyncio +async def test_get_data_source_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_data_source + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_data_source + ] = mock_rpc + + request = {} + await client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_data_source_async( + transport: str = "grpc_asyncio", request_type=backupvault.GetDataSourceRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.DataSource( + name="name_value", + state=backupvault.DataSource.State.CREATING, + backup_count=1278, + etag="etag_value", + total_stored_bytes=1946, + config_state=backupvault.BackupConfigState.ACTIVE, + ) + ) + response = await client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.GetDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.DataSource) + assert response.name == "name_value" + assert response.state == backupvault.DataSource.State.CREATING + assert response.backup_count == 1278 + assert response.etag == "etag_value" + assert response.total_stored_bytes == 1946 + assert response.config_state == backupvault.BackupConfigState.ACTIVE + + +@pytest.mark.asyncio +async def test_get_data_source_async_from_dict(): + await test_get_data_source_async(request_type=dict) + + +def test_get_data_source_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetDataSourceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value = backupvault.DataSource() + client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_data_source_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetDataSourceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.DataSource() + ) + await client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_data_source_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.DataSource() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_data_source( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_data_source_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_source( + backupvault.GetDataSourceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_data_source_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.DataSource() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.DataSource() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_data_source( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_data_source_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_data_source( + backupvault.GetDataSourceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateDataSourceRequest, + dict, + ], +) +def test_update_data_source(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_data_source_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateDataSourceRequest() + + +def test_update_data_source_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.UpdateDataSourceRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_data_source(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateDataSourceRequest() + + +def test_update_data_source_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_data_source in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_data_source + ] = mock_rpc + request = {} + client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_data_source_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateDataSourceRequest() + + +@pytest.mark.asyncio +async def test_update_data_source_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_data_source + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_data_source + ] = mock_rpc + + request = {} + await client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_data_source_async( + transport: str = "grpc_asyncio", request_type=backupvault.UpdateDataSourceRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_data_source_async_from_dict(): + await test_update_data_source_async(request_type=dict) + + +def test_update_data_source_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateDataSourceRequest() + + request.data_source.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_source.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_data_source_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateDataSourceRequest() + + request.data_source.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_source.name=name_value", + ) in kw["metadata"] + + +def test_update_data_source_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_data_source( + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].data_source + mock_val = backupvault.DataSource(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_data_source_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_source( + backupvault.UpdateDataSourceRequest(), + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_data_source_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_data_source( + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].data_source + mock_val = backupvault.DataSource(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_data_source_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_data_source( + backupvault.UpdateDataSourceRequest(), + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListBackupsRequest, + dict, + ], +) +def test_list_backups(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.ListBackupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backups_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupsRequest() + + +def test_list_backups_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.ListBackupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_backups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_backups_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + request = {} + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backups_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupsRequest() + + +@pytest.mark.asyncio +async def test_list_backups_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_backups + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_backups + ] = mock_rpc + + request = {} + await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backups_async( + transport: str = "grpc_asyncio", request_type=backupvault.ListBackupsRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.ListBackupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backups_async_from_dict(): + await test_list_backups_async(request_type=dict) + + +def test_list_backups_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.ListBackupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = backupvault.ListBackupsResponse() + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backups_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.ListBackupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupsResponse() + ) + await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backups_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backups_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backups( + backupvault.ListBackupsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backups_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backups_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_backups( + backupvault.ListBackupsRequest(), + parent="parent_value", + ) + + +def test_list_backups_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backups(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.Backup) for i in results) + + +def test_list_backups_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backups_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backups( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupvault.Backup) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backups_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetBackupRequest, + dict, + ], +) +def test_get_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.Backup( + name="name_value", + description="description_value", + etag="etag_value", + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + ) + response = client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.GetBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupvault.Backup) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.state == backupvault.Backup.State.CREATING + assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED + assert response.resource_size_bytes == 2056 + + +def test_get_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupRequest() + + +def test_get_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.GetBackupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupRequest( + name="name_value", + ) + + +def test_get_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + request = {} + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.Backup( + name="name_value", + description="description_value", + etag="etag_value", + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + ) + ) + response = await client.get_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupRequest() + + +@pytest.mark.asyncio +async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backup + ] = mock_rpc + + request = {} + await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.GetBackupRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.Backup( + name="name_value", + description="description_value", + etag="etag_value", + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + ) + ) + response = await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.GetBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.Backup) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.state == backupvault.Backup.State.CREATING + assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED + assert response.resource_size_bytes == 2056 + + +@pytest.mark.asyncio +async def test_get_backup_async_from_dict(): + await test_get_backup_async(request_type=dict) + + +def test_get_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = backupvault.Backup() + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.Backup()) + await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.Backup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup( + backupvault.GetBackupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupvault.Backup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.Backup()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_backup( + backupvault.GetBackupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateBackupRequest, + dict, + ], +) +def test_update_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_update_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupRequest() + + +def test_update_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.UpdateBackupRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupRequest() + + +def test_update_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + request = {} + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupRequest() + + +@pytest.mark.asyncio +async def test_update_backup_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_backup + ] = mock_rpc + + request = {} + await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.UpdateBackupRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_backup_async_from_dict(): + await test_update_backup_async(request_type=dict) + + +def test_update_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateBackupRequest() + + request.backup.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateBackupRequest() + + request.backup.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup.name=name_value", + ) in kw["metadata"] + + +def test_update_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_backup( + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].backup + mock_val = backupvault.Backup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup( + backupvault.UpdateBackupRequest(), + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_backup( + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].backup + mock_val = backupvault.Backup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_backup( + backupvault.UpdateBackupRequest(), + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.DeleteBackupRequest, + dict, + ], +) +def test_delete_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.DeleteBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupRequest() + + +def test_delete_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.DeleteBackupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupRequest( + name="name_value", + ) + + +def test_delete_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + request = {} + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_backup + ] = mock_rpc + + request = {} + await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.DeleteBackupRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.DeleteBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_async_from_dict(): + await test_delete_backup_async(request_type=dict) + + +def test_delete_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.DeleteBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.DeleteBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup( + backupvault.DeleteBackupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backup( + backupvault.DeleteBackupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.RestoreBackupRequest, + dict, + ], +) +def test_restore_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.RestoreBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_restore_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.restore_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.RestoreBackupRequest() + + +def test_restore_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.RestoreBackupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.restore_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.RestoreBackupRequest( + name="name_value", + ) + + +def test_restore_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.restore_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.restore_backup] = mock_rpc + request = {} + client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_restore_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.restore_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.RestoreBackupRequest() + + +@pytest.mark.asyncio +async def test_restore_backup_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.restore_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.restore_backup + ] = mock_rpc + + request = {} + await client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.restore_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_restore_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.RestoreBackupRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.RestoreBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_restore_backup_async_from_dict(): + await test_restore_backup_async(request_type=dict) + + +def test_restore_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.RestoreBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_restore_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.RestoreBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_restore_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.restore_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_restore_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.restore_backup( + backupvault.RestoreBackupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_restore_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.restore_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_restore_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.restore_backup( + backupvault.RestoreBackupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.CreateBackupPlanRequest, + dict, + ], +) +def test_create_backup_plan(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplan.CreateBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_backup_plan_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.CreateBackupPlanRequest() + + +def test_create_backup_plan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplan.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_backup_plan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + ) + + +def test_create_backup_plan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_plan in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_plan + ] = mock_rpc + request = {} + client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_plan_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.CreateBackupPlanRequest() + + +@pytest.mark.asyncio +async def test_create_backup_plan_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_backup_plan + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_backup_plan + ] = mock_rpc + + request = {} + await client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_plan_async( + transport: str = "grpc_asyncio", request_type=backupplan.CreateBackupPlanRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplan.CreateBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_backup_plan_async_from_dict(): + await test_create_backup_plan_async(request_type=dict) + + +def test_create_backup_plan_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.CreateBackupPlanRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_backup_plan_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.CreateBackupPlanRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_backup_plan_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup_plan( + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_plan + mock_val = backupplan.BackupPlan(name="name_value") + assert arg == mock_val + arg = args[0].backup_plan_id + mock_val = "backup_plan_id_value" + assert arg == mock_val + + +def test_create_backup_plan_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup_plan( + backupplan.CreateBackupPlanRequest(), + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_backup_plan_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_backup_plan( + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_plan + mock_val = backupplan.BackupPlan(name="name_value") + assert arg == mock_val + arg = args[0].backup_plan_id + mock_val = "backup_plan_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_backup_plan_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_backup_plan( + backupplan.CreateBackupPlanRequest(), + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.GetBackupPlanRequest, + dict, + ], +) +def test_get_backup_plan(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + ) + response = client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplan.GetBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupplan.BackupPlan) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == backupplan.BackupPlan.State.CREATING + assert response.resource_type == "resource_type_value" + assert response.etag == "etag_value" + assert response.backup_vault == "backup_vault_value" + assert response.backup_vault_service_account == "backup_vault_service_account_value" + + +def test_get_backup_plan_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.GetBackupPlanRequest() + + +def test_get_backup_plan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplan.GetBackupPlanRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup_plan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.GetBackupPlanRequest( + name="name_value", + ) + + +def test_get_backup_plan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup_plan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup_plan] = mock_rpc + request = {} + client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_plan_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + ) + ) + response = await client.get_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.GetBackupPlanRequest() + + +@pytest.mark.asyncio +async def test_get_backup_plan_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backup_plan + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped 
function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backup_plan + ] = mock_rpc + + request = {} + await client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_plan_async( + transport: str = "grpc_asyncio", request_type=backupplan.GetBackupPlanRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + ) + ) + response = await client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplan.GetBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupplan.BackupPlan) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == backupplan.BackupPlan.State.CREATING + assert response.resource_type == "resource_type_value" + assert response.etag == "etag_value" + assert response.backup_vault == "backup_vault_value" + assert response.backup_vault_service_account == "backup_vault_service_account_value" + + +@pytest.mark.asyncio +async def test_get_backup_plan_async_from_dict(): + await test_get_backup_plan_async(request_type=dict) + + +def test_get_backup_plan_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.GetBackupPlanRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + call.return_value = backupplan.BackupPlan() + client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_plan_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.GetBackupPlanRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.BackupPlan() + ) + await client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_plan_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupplan.BackupPlan() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup_plan( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_plan_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_plan( + backupplan.GetBackupPlanRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_plan_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupplan.BackupPlan() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.BackupPlan() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup_plan( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_plan_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_backup_plan( + backupplan.GetBackupPlanRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.ListBackupPlansRequest, + dict, + ], +) +def test_list_backup_plans(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplan.ListBackupPlansRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupPlansPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_plans_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_plans() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.ListBackupPlansRequest() + + +def test_list_backup_plans_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = backupplan.ListBackupPlansRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_plans(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.ListBackupPlansRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_backup_plans_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backup_plans in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_plans + ] = mock_rpc + request = {} + client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_backup_plans(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_plans_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_plans() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.ListBackupPlansRequest() + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_backup_plans + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + 
client._client._transport.list_backup_plans + ] = mock_rpc + + request = {} + await client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_backup_plans(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_plans_async( + transport: str = "grpc_asyncio", request_type=backupplan.ListBackupPlansRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplan.ListBackupPlansRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupPlansAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_from_dict(): + await test_list_backup_plans_async(request_type=dict) + + +def test_list_backup_plans_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.ListBackupPlansRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + call.return_value = backupplan.ListBackupPlansResponse() + client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backup_plans_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.ListBackupPlansRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.ListBackupPlansResponse() + ) + await client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backup_plans_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplan.ListBackupPlansResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backup_plans( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backup_plans_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_plans( + backupplan.ListBackupPlansRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backup_plans_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupplan.ListBackupPlansResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.ListBackupPlansResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backup_plans( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backup_plans_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backup_plans( + backupplan.ListBackupPlansRequest(), + parent="parent_value", + ) + + +def test_list_backup_plans_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_plans(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupplan.BackupPlan) for i in results) + + +def test_list_backup_plans_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_plans(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_plans( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupplan.BackupPlan) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_plans(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.DeleteBackupPlanRequest, + dict, + ], +) +def test_delete_backup_plan(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplan.DeleteBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_plan_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.DeleteBackupPlanRequest() + + +def test_delete_backup_plan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplan.DeleteBackupPlanRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_backup_plan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.DeleteBackupPlanRequest( + name="name_value", + ) + + +def test_delete_backup_plan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_plan in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backup_plan + ] = mock_rpc + request = {} + client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_plan_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.DeleteBackupPlanRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_plan_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_backup_plan + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_backup_plan + ] = mock_rpc + + request = {} + await client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_plan_async( + transport: str = "grpc_asyncio", request_type=backupplan.DeleteBackupPlanRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplan.DeleteBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_plan_async_from_dict(): + await test_delete_backup_plan_async(request_type=dict) + + +def test_delete_backup_plan_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.DeleteBackupPlanRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_plan_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.DeleteBackupPlanRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_plan_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup_plan( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_plan_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup_plan( + backupplan.DeleteBackupPlanRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_plan_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup_plan( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_plan_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backup_plan( + backupplan.DeleteBackupPlanRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.CreateBackupPlanAssociationRequest, + dict, + ], +) +def test_create_backup_plan_association(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.CreateBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_backup_plan_association_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.CreateBackupPlanAssociationRequest() + + +def test_create_backup_plan_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_backup_plan_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + ) + + +def test_create_backup_plan_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_plan_association + ] = mock_rpc + request = {} + client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.CreateBackupPlanAssociationRequest() + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_backup_plan_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_backup_plan_association + ] = mock_rpc + + request = {} + await client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.CreateBackupPlanAssociationRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.CreateBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_async_from_dict(): + await test_create_backup_plan_association_async(request_type=dict) + + +def test_create_backup_plan_association_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backupplanassociation.CreateBackupPlanAssociationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.CreateBackupPlanAssociationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_backup_plan_association_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup_plan_association( + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_plan_association + mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") + assert arg == mock_val + arg = args[0].backup_plan_association_id + mock_val = "backup_plan_association_id_value" + assert arg == mock_val + + +def test_create_backup_plan_association_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup_plan_association( + backupplanassociation.CreateBackupPlanAssociationRequest(), + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_backup_plan_association( + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_plan_association + mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") + assert arg == mock_val + arg = args[0].backup_plan_association_id + mock_val = "backup_plan_association_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_backup_plan_association( + backupplanassociation.CreateBackupPlanAssociationRequest(), + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.GetBackupPlanAssociationRequest, + dict, + ], +) +def test_get_backup_plan_association(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + ) + response = client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.GetBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupplanassociation.BackupPlanAssociation) + assert response.name == "name_value" + assert response.resource_type == "resource_type_value" + assert response.resource == "resource_value" + assert response.backup_plan == "backup_plan_value" + assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING + assert response.data_source == "data_source_value" + + +def test_get_backup_plan_association_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.GetBackupPlanAssociationRequest() + + +def test_get_backup_plan_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_backup_plan_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.GetBackupPlanAssociationRequest( + name="name_value", + ) + + +def test_get_backup_plan_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_backup_plan_association + ] = mock_rpc + request = {} + client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + ) + ) + response = await client.get_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.GetBackupPlanAssociationRequest() + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backup_plan_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backup_plan_association + ] = mock_rpc + + request = {} + await client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.GetBackupPlanAssociationRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + ) + ) + response = await client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.GetBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupplanassociation.BackupPlanAssociation) + assert response.name == "name_value" + assert response.resource_type == "resource_type_value" + assert response.resource == "resource_value" + assert response.backup_plan == "backup_plan_value" + assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING + assert response.data_source == "data_source_value" + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_async_from_dict(): + await test_get_backup_plan_association_async(request_type=dict) + + +def test_get_backup_plan_association_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.GetBackupPlanAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value = backupplanassociation.BackupPlanAssociation() + client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backupplanassociation.GetBackupPlanAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation() + ) + await client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_plan_association_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.BackupPlanAssociation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup_plan_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_plan_association_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup_plan_association( + backupplanassociation.GetBackupPlanAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.BackupPlanAssociation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup_plan_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_backup_plan_association( + backupplanassociation.GetBackupPlanAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.ListBackupPlanAssociationsRequest, + dict, + ], +) +def test_list_backup_plan_associations(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.ListBackupPlanAssociationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupPlanAssociationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_plan_associations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_plan_associations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.ListBackupPlanAssociationsRequest() + + +def test_list_backup_plan_associations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.ListBackupPlanAssociationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_backup_plan_associations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.ListBackupPlanAssociationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + +def test_list_backup_plan_associations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_backup_plan_associations + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_plan_associations + ] = mock_rpc + request = {} + client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backup_plan_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_plan_associations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.ListBackupPlanAssociationsRequest() + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_backup_plan_associations + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_backup_plan_associations + ] = mock_rpc + + request = {} + await client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_backup_plan_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.ListBackupPlanAssociationsRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.ListBackupPlanAssociationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupPlanAssociationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_from_dict(): + await test_list_backup_plan_associations_async(request_type=dict) + + +def test_list_backup_plan_associations_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.ListBackupPlanAssociationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.ListBackupPlanAssociationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse() + ) + await client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backup_plan_associations_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backup_plan_associations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backup_plan_associations_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_backup_plan_associations( + backupplanassociation.ListBackupPlanAssociationsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backup_plan_associations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backup_plan_associations( + backupplanassociation.ListBackupPlanAssociationsRequest(), + parent="parent_value", + ) + + +def test_list_backup_plan_associations_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_plan_associations( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, backupplanassociation.BackupPlanAssociation) for i in results + ) + + +def test_list_backup_plan_associations_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_plan_associations(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_plan_associations( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, backupplanassociation.BackupPlanAssociation) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_plan_associations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.DeleteBackupPlanAssociationRequest, + dict, + ], +) +def test_delete_backup_plan_association(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.DeleteBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_plan_association_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.DeleteBackupPlanAssociationRequest() + + +def test_delete_backup_plan_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup_plan_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + +def test_delete_backup_plan_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backup_plan_association + ] = mock_rpc + request = {} + client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.DeleteBackupPlanAssociationRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_backup_plan_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = 
mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_backup_plan_association + ] = mock_rpc + + request = {} + await client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.DeleteBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_async_from_dict(): + await test_delete_backup_plan_association_async(request_type=dict) + + +def test_delete_backup_plan_association_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.DeleteBackupPlanAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.DeleteBackupPlanAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_plan_association_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup_plan_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_plan_association_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_backup_plan_association( + backupplanassociation.DeleteBackupPlanAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup_plan_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_backup_plan_association( + backupplanassociation.DeleteBackupPlanAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.TriggerBackupRequest, + dict, + ], +) +def test_trigger_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.TriggerBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_trigger_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.trigger_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.TriggerBackupRequest() + + +def test_trigger_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.trigger_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + +def test_trigger_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.trigger_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.trigger_backup] = mock_rpc + request = {} + client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.trigger_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_trigger_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.trigger_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.TriggerBackupRequest() + + +@pytest.mark.asyncio +async def test_trigger_backup_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.trigger_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.trigger_backup + ] = mock_rpc + + request = {} + await client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.trigger_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_trigger_backup_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.TriggerBackupRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.TriggerBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_trigger_backup_async_from_dict(): + await test_trigger_backup_async(request_type=dict) + + +def test_trigger_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.TriggerBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_trigger_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.TriggerBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_trigger_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.trigger_backup( + name="name_value", + rule_id="rule_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].rule_id + mock_val = "rule_id_value" + assert arg == mock_val + + +def test_trigger_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.trigger_backup( + backupplanassociation.TriggerBackupRequest(), + name="name_value", + rule_id="rule_id_value", + ) + + +@pytest.mark.asyncio +async def test_trigger_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.trigger_backup( + name="name_value", + rule_id="rule_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].rule_id + mock_val = "rule_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_trigger_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.trigger_backup( + backupplanassociation.TriggerBackupRequest(), + name="name_value", + rule_id="rule_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.ListManagementServersRequest, + dict, + ], +) +def test_list_management_servers_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupdr.ListManagementServersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupdr.ListManagementServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_management_servers(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListManagementServersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_management_servers_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_management_servers + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_management_servers + ] = mock_rpc + + request = {} + client.list_management_servers(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_management_servers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_management_servers_rest_required_fields( + request_type=backupdr.ListManagementServersRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_management_servers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_management_servers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupdr.ListManagementServersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupdr.ListManagementServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_management_servers(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_management_servers_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_management_servers._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_management_servers_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_management_servers" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_management_servers" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupdr.ListManagementServersRequest.pb( + backupdr.ListManagementServersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupdr.ListManagementServersResponse.to_json( + backupdr.ListManagementServersResponse() + ) + + request = backupdr.ListManagementServersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupdr.ListManagementServersResponse() + + client.list_management_servers( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_management_servers_rest_bad_request( + transport: str = "rest", request_type=backupdr.ListManagementServersRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_management_servers(request) + + +def test_list_management_servers_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupdr.ListManagementServersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupdr.ListManagementServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_management_servers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/managementServers" + % client.transport._host, + args[1], + ) + + +def test_list_management_servers_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_management_servers( + backupdr.ListManagementServersRequest(), + parent="parent_value", + ) + + +def test_list_management_servers_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupdr.ListManagementServersResponse( + management_servers=[ + backupdr.ManagementServer(), + backupdr.ManagementServer(), + backupdr.ManagementServer(), + ], + next_page_token="abc", + ), + backupdr.ListManagementServersResponse( + management_servers=[], + next_page_token="def", + ), + backupdr.ListManagementServersResponse( + management_servers=[ + backupdr.ManagementServer(), + ], + next_page_token="ghi", + ), + backupdr.ListManagementServersResponse( + management_servers=[ + backupdr.ManagementServer(), + backupdr.ManagementServer(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupdr.ListManagementServersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_management_servers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupdr.ManagementServer) for i in results) + + pages = list(client.list_management_servers(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.GetManagementServerRequest, + dict, + ], +) +def test_get_management_server_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + 
request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupdr.ManagementServer( + name="name_value", + description="description_value", + type_=backupdr.ManagementServer.InstanceType.BACKUP_RESTORE, + state=backupdr.ManagementServer.InstanceState.CREATING, + etag="etag_value", + oauth2_client_id="oauth2_client_id_value", + ba_proxy_uri=["ba_proxy_uri_value"], + satisfies_pzi=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupdr.ManagementServer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_management_server(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupdr.ManagementServer) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.type_ == backupdr.ManagementServer.InstanceType.BACKUP_RESTORE + assert response.state == backupdr.ManagementServer.InstanceState.CREATING + assert response.etag == "etag_value" + assert response.oauth2_client_id == "oauth2_client_id_value" + assert response.ba_proxy_uri == ["ba_proxy_uri_value"] + assert response.satisfies_pzi is True + + +def test_get_management_server_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_management_server + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_management_server + ] = mock_rpc + + request = {} + client.get_management_server(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_management_server(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_management_server_rest_required_fields( + request_type=backupdr.GetManagementServerRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_management_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_management_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupdr.ManagementServer() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupdr.ManagementServer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_management_server(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_management_server_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_management_server._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_management_server_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_management_server" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_management_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
backupdr.GetManagementServerRequest.pb( + backupdr.GetManagementServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupdr.ManagementServer.to_json( + backupdr.ManagementServer() + ) + + request = backupdr.GetManagementServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupdr.ManagementServer() + + client.get_management_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_management_server_rest_bad_request( + transport: str = "rest", request_type=backupdr.GetManagementServerRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_management_server(request) + + +def test_get_management_server_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupdr.ManagementServer() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupdr.ManagementServer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_management_server(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/managementServers/*}" + % client.transport._host, + args[1], + ) + + +def test_get_management_server_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_management_server( + backupdr.GetManagementServerRequest(), + name="name_value", + ) + + +def test_get_management_server_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.CreateManagementServerRequest, + dict, + ], +) +def test_create_management_server_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["management_server"] = { + "name": "name_value", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "type_": 1, + "management_uri": {"web_ui": "web_ui_value", "api": "api_value"}, + "workforce_identity_based_management_uri": { + "first_party_management_uri": "first_party_management_uri_value", + "third_party_management_uri": "third_party_management_uri_value", + }, + "state": 1, + "networks": [{"network": "network_value", "peering_mode": 1}], + "etag": "etag_value", + "oauth2_client_id": "oauth2_client_id_value", + "workforce_identity_based_oauth2_client_id": { + "first_party_oauth2_client_id": "first_party_oauth2_client_id_value", + "third_party_oauth2_client_id": "third_party_oauth2_client_id_value", + }, + "ba_proxy_uri": ["ba_proxy_uri_value1", "ba_proxy_uri_value2"], + "satisfies_pzs": {"value": True}, + "satisfies_pzi": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupdr.CreateManagementServerRequest.meta.fields["management_server"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["management_server"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version 
of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["management_server"][field])): + del request_init["management_server"][field][i][subfield] + else: + del request_init["management_server"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_management_server(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_management_server_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_management_server + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_management_server + ] = mock_rpc + + request = {} + client.create_management_server(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_management_server(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_management_server_rest_required_fields( + request_type=backupdr.CreateManagementServerRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["management_server_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "managementServerId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_management_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "managementServerId" in jsonified_request + assert ( + jsonified_request["managementServerId"] == request_init["management_server_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["managementServerId"] = "management_server_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_management_server._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "management_server_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "managementServerId" in jsonified_request + assert jsonified_request["managementServerId"] == "management_server_id_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_management_server(request) + + expected_params = [ + ( + "managementServerId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_management_server_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_management_server._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "managementServerId", + "requestId", + ) + ) + & set( + ( + "parent", + "managementServerId", + "managementServer", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_management_server_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_management_server" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_create_management_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = backupdr.CreateManagementServerRequest.pb( + backupdr.CreateManagementServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupdr.CreateManagementServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_management_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_management_server_rest_bad_request( + transport: str = "rest", request_type=backupdr.CreateManagementServerRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_management_server(request) + + +def test_create_management_server_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + management_server=backupdr.ManagementServer(name="name_value"), + management_server_id="management_server_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_management_server(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/managementServers" + % client.transport._host, + args[1], + ) + + +def test_create_management_server_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_management_server( + backupdr.CreateManagementServerRequest(), + parent="parent_value", + management_server=backupdr.ManagementServer(name="name_value"), + management_server_id="management_server_id_value", + ) + + +def test_create_management_server_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.DeleteManagementServerRequest, + dict, + ], +) +def test_delete_management_server_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_management_server(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_management_server_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_management_server + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_management_server + ] = mock_rpc + + request = {} + client.delete_management_server(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_management_server(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_management_server_rest_required_fields( + request_type=backupdr.DeleteManagementServerRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_management_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_management_server._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_management_server(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_management_server_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_management_server._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_management_server_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + 
transports.BackupDRRestInterceptor, "post_delete_management_server" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_delete_management_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupdr.DeleteManagementServerRequest.pb( + backupdr.DeleteManagementServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupdr.DeleteManagementServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_management_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_management_server_rest_bad_request( + transport: str = "rest", request_type=backupdr.DeleteManagementServerRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_management_server(request) + + +def test_delete_management_server_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_management_server(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/managementServers/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_management_server_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_management_server( + backupdr.DeleteManagementServerRequest(), + name="name_value", + ) + + +def test_delete_management_server_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.CreateBackupVaultRequest, + dict, + ], +) +def test_create_backup_vault_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup_vault"] = { + "name": "name_value", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "backup_minimum_enforced_retention_duration": {"seconds": 751, "nanos": 543}, + "deletable": True, + "etag": "etag_value", + "state": 1, + "effective_time": {}, + "backup_count": 1278, + "service_account": "service_account_value", + "total_stored_bytes": 1946, + "uid": "uid_value", + "annotations": {}, + "access_restriction": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupvault.CreateBackupVaultRequest.meta.fields["backup_vault"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_vault"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_vault"][field])): + del request_init["backup_vault"][field][i][subfield] + else: + del 
request_init["backup_vault"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_backup_vault(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_backup_vault_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_vault in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_vault + ] = mock_rpc + + request = {} + client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_backup_vault_rest_required_fields( + request_type=backupvault.CreateBackupVaultRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_vault_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "backupVaultId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_vault._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "backupVaultId" in jsonified_request + assert jsonified_request["backupVaultId"] == request_init["backup_vault_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["backupVaultId"] = "backup_vault_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_vault._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "backup_vault_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupVaultId" in jsonified_request + assert jsonified_request["backupVaultId"] == "backup_vault_id_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_backup_vault(request) + + expected_params = [ + ( + "backupVaultId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_backup_vault_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_backup_vault._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "backupVaultId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "backupVaultId", + "backupVault", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_backup_vault_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_backup_vault" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_create_backup_vault" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
backupvault.CreateBackupVaultRequest.pb( + backupvault.CreateBackupVaultRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.CreateBackupVaultRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_backup_vault( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_backup_vault_rest_bad_request( + transport: str = "rest", request_type=backupvault.CreateBackupVaultRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_backup_vault(request) + + +def test_create_backup_vault_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_backup_vault(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupVaults" + % client.transport._host, + args[1], + ) + + +def test_create_backup_vault_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup_vault( + backupvault.CreateBackupVaultRequest(), + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + +def test_create_backup_vault_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListBackupVaultsRequest, + dict, + ], +) +def test_list_backup_vaults_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_backup_vaults(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_vaults_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_backup_vaults in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_vaults + ] = mock_rpc + + request = {} + client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backup_vaults_rest_required_fields( + request_type=backupvault.ListBackupVaultsRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_vaults._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_vaults._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.ListBackupVaultsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.ListBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_backup_vaults(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_backup_vaults_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_backup_vaults._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backup_vaults_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + 
def test_list_backup_vaults_rest_bad_request(
    transport: str = "rest", request_type=backupvault.ListBackupVaultsRequest
):
    """An HTTP 400 reply from the session must surface as BadRequest."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Build a request that satisfies transcoding.
    request = request_type(**{"parent": "projects/sample1/locations/sample2"})

    # Prepare a fake 400 response ahead of patching the session.
    fake_response = Response()
    fake_response.status_code = 400
    fake_response.request = Request()

    with mock.patch.object(Session, "request") as req:
        req.return_value = fake_response
        with pytest.raises(core_exceptions.BadRequest):
            client.list_backup_vaults(request)
def test_list_backup_vaults_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields is a ValueError."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport
    )

    request = backupvault.ListBackupVaultsRequest()
    # Supplying both a request message and flattened arguments is rejected.
    with pytest.raises(ValueError):
        client.list_backup_vaults(request, parent="parent_value")
@pytest.mark.parametrize(
    "request_type",
    [
        backupvault.FetchUsableBackupVaultsRequest,
        dict,
    ],
)
def test_fetch_usable_backup_vaults_rest(request_type):
    """Round-trip a FetchUsableBackupVaults call through a mocked session."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # A request that satisfies transcoding.
    request = request_type(**{"parent": "projects/sample1/locations/sample2"})

    # Canned server reply.
    expected = backupvault.FetchUsableBackupVaultsResponse(
        next_page_token="next_page_token_value",
        unreachable=["unreachable_value"],
    )

    with mock.patch.object(type(client.transport._session), "request") as req:
        # Serialize the reply the way the server would: proto -> JSON bytes.
        http_response = Response()
        http_response.status_code = 200
        payload = json_format.MessageToJson(
            backupvault.FetchUsableBackupVaultsResponse.pb(expected)
        )
        http_response._content = payload.encode("UTF-8")
        req.return_value = http_response

        response = client.fetch_usable_backup_vaults(request)

    # The client wraps the decoded response in a pager.
    assert isinstance(response, pagers.FetchUsableBackupVaultsPager)
    assert response.next_page_token == "next_page_token_value"
    assert response.unreachable == ["unreachable_value"]
def test_fetch_usable_backup_vaults_rest_required_fields(
    request_type=backupvault.FetchUsableBackupVaultsRequest,
):
    """Exercise required-field handling for FetchUsableBackupVaults.

    Verifies that default-valued fields are dropped from the serialized
    request, that optional query params stay separate from required path
    params, and that a call through the client sends the expected
    ``$alt=json`` query parameter.
    """
    transport_class = transports.BackupDRRestTransport

    request_init = {}
    # "parent" is required; start it empty so it serializes as a default.
    request_init["parent"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
        json_format.MessageToJson(pb_request, use_integers_for_enums=False)
    )

    # verify fields with default values are dropped

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).fetch_usable_backup_vaults._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["parent"] = "parent_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).fetch_usable_backup_vaults._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    # Note: these are the snake_case proto names, not the camelCase JSON names.
    assert not set(unset_fields) - set(
        (
            "filter",
            "order_by",
            "page_size",
            "page_token",
        )
    )
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == "parent_value"

    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = backupvault.FetchUsableBackupVaultsResponse()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = backupvault.FetchUsableBackupVaultsResponse.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.fetch_usable_backup_vaults(request)

            # The REST layer always appends the JSON alt param.
            expected_params = [("$alt", "json;enum-encoding=int")]
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_fetch_usable_backup_vaults_rest_bad_request(
    transport: str = "rest", request_type=backupvault.FetchUsableBackupVaultsRequest
):
    """An HTTP 400 reply from the session must surface as BadRequest."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Build a request that satisfies transcoding.
    request = request_type(**{"parent": "projects/sample1/locations/sample2"})

    # Prepare a fake 400 response ahead of patching the session.
    fake_response = Response()
    fake_response.status_code = 400
    fake_response.request = Request()

    with mock.patch.object(Session, "request") as req:
        req.return_value = fake_response
        with pytest.raises(core_exceptions.BadRequest):
            client.fetch_usable_backup_vaults(request)
def test_fetch_usable_backup_vaults_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields is a ValueError."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport
    )

    request = backupvault.FetchUsableBackupVaultsRequest()
    # Supplying both a request message and flattened arguments is rejected.
    with pytest.raises(ValueError):
        client.fetch_usable_backup_vaults(request, parent="parent_value")
@pytest.mark.parametrize(
    "request_type",
    [
        backupvault.GetBackupVaultRequest,
        dict,
    ],
)
def test_get_backup_vault_rest(request_type):
    """GetBackupVault: a mocked 200 reply is decoded into a BackupVault.

    Parametrized over the proto request type and a plain dict to confirm
    both input forms are accepted by the client.
    """
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = backupvault.BackupVault(
            name="name_value",
            description="description_value",
            deletable=True,
            etag="etag_value",
            state=backupvault.BackupVault.State.CREATING,
            backup_count=1278,
            service_account="service_account_value",
            total_stored_bytes=1946,
            uid="uid_value",
            access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT,
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = backupvault.BackupVault.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)

        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.get_backup_vault(request)

    # Establish that the response is the type that we expect.
    # Every field set above must survive the JSON round-trip.
    assert isinstance(response, backupvault.BackupVault)
    assert response.name == "name_value"
    assert response.description == "description_value"
    assert response.deletable is True
    assert response.etag == "etag_value"
    assert response.state == backupvault.BackupVault.State.CREATING
    assert response.backup_count == 1278
    assert response.service_account == "service_account_value"
    assert response.total_stored_bytes == 1946
    assert response.uid == "uid_value"
    assert (
        response.access_restriction
        == backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT
    )
def test_get_backup_vault_rest_required_fields(
    request_type=backupvault.GetBackupVaultRequest,
):
    """Exercise required-field handling for GetBackupVault.

    Verifies that the default-valued required field ``name`` is dropped from
    the serialized request, that the only optional query param is ``view``,
    and that a call through the client sends the ``$alt=json`` parameter.
    """
    transport_class = transports.BackupDRRestTransport

    request_init = {}
    # "name" is required; start it empty so it serializes as a default.
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
        json_format.MessageToJson(pb_request, use_integers_for_enums=False)
    )

    # verify fields with default values are dropped

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get_backup_vault._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = "name_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get_backup_vault._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("view",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == "name_value"

    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = backupvault.BackupVault()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = backupvault.BackupVault.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.get_backup_vault(request)

            # The REST layer always appends the JSON alt param.
            expected_params = [("$alt", "json;enum-encoding=int")]
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_get_backup_vault_rest_bad_request(
    transport: str = "rest", request_type=backupvault.GetBackupVaultRequest
):
    """An HTTP 400 reply from the session must surface as BadRequest."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Build a request that satisfies transcoding.
    request = request_type(
        **{"name": "projects/sample1/locations/sample2/backupVaults/sample3"}
    )

    # Prepare a fake 400 response ahead of patching the session.
    fake_response = Response()
    fake_response.status_code = 400
    fake_response.request = Request()

    with mock.patch.object(Session, "request") as req:
        req.return_value = fake_response
        with pytest.raises(core_exceptions.BadRequest):
            client.get_backup_vault(request)
def test_get_backup_vault_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields is a ValueError."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport
    )

    request = backupvault.GetBackupVaultRequest()
    # Supplying both a request message and flattened arguments is rejected.
    with pytest.raises(ValueError):
        client.get_backup_vault(request, name="name_value")
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_vault"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_vault"][field])): + del request_init["backup_vault"][field][i][subfield] + else: + del 
def test_update_backup_vault_rest_use_cached_wrapped_rpc():
    """The client wraps each RPC once at construction and reuses the wrapper."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call.
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrap_fn:
        client = BackupDRClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Construction wraps every method; reset so later checks only see
        # wrapping triggered by calls.
        assert wrap_fn.call_count > 0
        wrap_fn.reset_mock()

        transport = client._transport
        # The method must already be present in the wrapper cache.
        assert transport.update_backup_vault in transport._wrapped_methods

        # Swap the cached wrapper for a stub so calls can be counted.
        stub = mock.Mock()
        # operation_request.operation in compute client(s) expect a string.
        stub.return_value.name = "foo"
        transport._wrapped_methods[transport.update_backup_vault] = stub

        request = {}
        client.update_backup_vault(request)
        assert stub.call_count == 1

        # Operation methods build a cached wrapper on the first rpc call;
        # subsequent calls should reuse it rather than wrap again.
        wrap_fn.reset_mock()
        client.update_backup_vault(request)
        assert wrap_fn.call_count == 0
        assert stub.call_count == 2
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_backup_vault(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_backup_vault_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_backup_vault._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "backupVault", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_backup_vault_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as 
transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_update_backup_vault" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_update_backup_vault" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.UpdateBackupVaultRequest.pb( + backupvault.UpdateBackupVaultRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.UpdateBackupVaultRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_backup_vault( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_backup_vault_rest_bad_request( + transport: str = "rest", request_type=backupvault.UpdateBackupVaultRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_backup_vault(request) + + +def test_update_backup_vault_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_backup_vault(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{backup_vault.name=projects/*/locations/*/backupVaults/*}" + % client.transport._host, + args[1], + ) + + +def test_update_backup_vault_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup_vault( + backupvault.UpdateBackupVaultRequest(), + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_backup_vault_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.DeleteBackupVaultRequest, + dict, + ], +) +def test_delete_backup_vault_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_backup_vault(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_backup_vault_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_vault in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backup_vault + ] = mock_rpc + + request = {} + client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_backup_vault_rest_required_fields( + request_type=backupvault.DeleteBackupVaultRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup_vault._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup_vault._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "force", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_backup_vault(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_backup_vault_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_backup_vault._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "etag", + "force", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_vault_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" 
+ ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_delete_backup_vault" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_delete_backup_vault" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.DeleteBackupVaultRequest.pb( + backupvault.DeleteBackupVaultRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.DeleteBackupVaultRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_backup_vault( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_backup_vault_rest_bad_request( + transport: str = "rest", request_type=backupvault.DeleteBackupVaultRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_backup_vault(request) + + +def test_delete_backup_vault_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_backup_vault(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backup_vault_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_backup_vault( + backupvault.DeleteBackupVaultRequest(), + name="name_value", + ) + + +def test_delete_backup_vault_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListDataSourcesRequest, + dict, + ], +) +def test_list_data_sources_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.ListDataSourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_data_sources(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDataSourcesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_data_sources_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_sources in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_data_sources + ] = mock_rpc + + request = {} + client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_data_sources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_data_sources_rest_required_fields( + request_type=backupvault.ListDataSourcesRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_sources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_sources._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.ListDataSourcesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_data_sources(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_data_sources_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_data_sources._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_data_sources_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_data_sources" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_data_sources" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.ListDataSourcesRequest.pb( + backupvault.ListDataSourcesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.ListDataSourcesResponse.to_json( + backupvault.ListDataSourcesResponse() + ) + + request = backupvault.ListDataSourcesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.ListDataSourcesResponse() + + client.list_data_sources( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_data_sources_rest_bad_request( + transport: str = "rest", request_type=backupvault.ListDataSourcesRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_data_sources(request) + + +def test_list_data_sources_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.ListDataSourcesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_data_sources(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/backupVaults/*}/dataSources" + % client.transport._host, + args[1], + ) + + +def test_list_data_sources_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_sources( + backupvault.ListDataSourcesRequest(), + parent="parent_value", + ) + + +def test_list_data_sources_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupvault.ListDataSourcesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3" + } + + pager = client.list_data_sources(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.DataSource) for i in results) + + pages = list(client.list_data_sources(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetDataSourceRequest, + dict, + ], +) +def test_get_data_source_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + request = request_type(**request_init) + + 
# Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.DataSource( + name="name_value", + state=backupvault.DataSource.State.CREATING, + backup_count=1278, + etag="etag_value", + total_stored_bytes=1946, + config_state=backupvault.BackupConfigState.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_data_source(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, backupvault.DataSource) + assert response.name == "name_value" + assert response.state == backupvault.DataSource.State.CREATING + assert response.backup_count == 1278 + assert response.etag == "etag_value" + assert response.total_stored_bytes == 1946 + assert response.config_state == backupvault.BackupConfigState.ACTIVE + + +def test_get_data_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # 
operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_data_source] = mock_rpc + + request = {} + client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_data_source_rest_required_fields( + request_type=backupvault.GetDataSourceRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.DataSource() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_data_source(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_data_source_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_data_source._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_data_source_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, 
"post_get_data_source" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_data_source" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.GetDataSourceRequest.pb( + backupvault.GetDataSourceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.DataSource.to_json( + backupvault.DataSource() + ) + + request = backupvault.GetDataSourceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.DataSource() + + client.get_data_source( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_data_source_rest_bad_request( + transport: str = "rest", request_type=backupvault.GetDataSourceRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_data_source(request) + + +def test_get_data_source_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.DataSource() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_data_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*}" + % client.transport._host, + args[1], + ) + + +def test_get_data_source_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_source( + backupvault.GetDataSourceRequest(), + name="name_value", + ) + + +def test_get_data_source_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateDataSourceRequest, + dict, + ], +) +def test_update_data_source_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "data_source": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + } + request_init["data_source"] = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4", + "state": 1, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "backup_count": 1278, + "etag": "etag_value", + "total_stored_bytes": 1946, + "config_state": 1, + "backup_config_info": { + "last_backup_state": 1, + "last_successful_backup_consistency_time": {}, + "last_backup_error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "gcp_backup_config": { + "backup_plan": "backup_plan_value", + "backup_plan_description": "backup_plan_description_value", + "backup_plan_association": 
"backup_plan_association_value", + "backup_plan_rules": [ + "backup_plan_rules_value1", + "backup_plan_rules_value2", + ], + }, + "backup_appliance_backup_config": { + "backup_appliance_name": "backup_appliance_name_value", + "backup_appliance_id": 1966, + "sla_id": 620, + "application_name": "application_name_value", + "host_name": "host_name_value", + "slt_name": "slt_name_value", + "slp_name": "slp_name_value", + }, + }, + "data_source_gcp_resource": { + "gcp_resourcename": "gcp_resourcename_value", + "location": "location_value", + "type_": "type__value", + "compute_instance_datasource_properties": { + "name": "name_value", + "description": "description_value", + "machine_type": "machine_type_value", + "total_disk_count": 1718, + "total_disk_size_gb": 1904, + }, + }, + "data_source_backup_appliance_application": { + "application_name": "application_name_value", + "backup_appliance": "backup_appliance_value", + "appliance_id": 1241, + "type_": "type__value", + "application_id": 1472, + "hostname": "hostname_value", + "host_id": 746, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupvault.UpdateDataSourceRequest.meta.fields["data_source"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_source"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_source"][field])): + del request_init["data_source"][field][i][subfield] + else: + del 
request_init["data_source"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_data_source(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_data_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_data_source in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_data_source + ] = mock_rpc + + request = {} + client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_data_source_rest_required_fields( + request_type=backupvault.UpdateDataSourceRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_source._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_data_source(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_data_source_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_data_source._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "dataSource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_data_source_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_update_data_source" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_update_data_source" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.UpdateDataSourceRequest.pb( + backupvault.UpdateDataSourceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.UpdateDataSourceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_data_source( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_data_source_rest_bad_request( + transport: str = "rest", request_type=backupvault.UpdateDataSourceRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "data_source": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_data_source(request) + + +def test_update_data_source_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "data_source": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_data_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{data_source.name=projects/*/locations/*/backupVaults/*/dataSources/*}" + % client.transport._host, + args[1], + ) + + +def test_update_data_source_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_source( + backupvault.UpdateDataSourceRequest(), + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_data_source_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListBackupsRequest, + dict, + ], +) +def test_list_backups_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupvault.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_backups(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + + request = {} + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backups_rest_required_fields(request_type=backupvault.ListBackupsRequest): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.ListBackupsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_backups(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_backups_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_backups._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backups_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) 
as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_backups" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_backups" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.ListBackupsRequest.pb(backupvault.ListBackupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.ListBackupsResponse.to_json( + backupvault.ListBackupsResponse() + ) + + request = backupvault.ListBackupsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.ListBackupsResponse() + + client.list_backups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_backups_rest_bad_request( + transport: str = "rest", request_type=backupvault.ListBackupsRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_backups(request) + + +def test_list_backups_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.ListBackupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_backups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/backupVaults/*/dataSources/*}/backups" + % client.transport._host, + args[1], + ) + + +def test_list_backups_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backups( + backupvault.ListBackupsRequest(), + parent="parent_value", + ) + + +def test_list_backups_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(backupvault.ListBackupsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + 
+ sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + + pager = client.list_backups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.Backup) for i in results) + + pages = list(client.list_backups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetBackupRequest, + dict, + ], +) +def test_get_backup_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.Backup( + name="name_value", + description="description_value", + etag="etag_value", + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_backup(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.Backup) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.state == backupvault.Backup.State.CREATING + assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED + assert response.resource_size_bytes == 2056 + + +def test_get_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + + request = {} + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_rest_required_fields(request_type=backupvault.GetBackupRequest): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.Backup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_backup" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.GetBackupRequest.pb(backupvault.GetBackupRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.Backup.to_json(backupvault.Backup()) + + request = backupvault.GetBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.Backup() + + client.get_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_backup_rest_bad_request( + transport: str = "rest", request_type=backupvault.GetBackupRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_backup(request) + + +def test_get_backup_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupvault.Backup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" + % client.transport._host, + args[1], + ) + + +def test_get_backup_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup( + backupvault.GetBackupRequest(), + name="name_value", + ) + + +def test_get_backup_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateBackupRequest, + dict, + ], +) +def test_update_backup_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + } + request_init["backup"] = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "enforced_retention_end_time": {}, + "expire_time": {}, + "consistency_time": {}, + "etag": "etag_value", + "state": 1, + "service_locks": [ + { + "lock_until_time": {}, + "backup_appliance_lock_info": { + "backup_appliance_id": 1966, + "backup_appliance_name": "backup_appliance_name_value", + "lock_reason": "lock_reason_value", + "job_name": "job_name_value", + "backup_image": "backup_image_value", + "sla_id": 620, + }, + "service_lock_info": {"operation": "operation_value"}, + } + ], + "backup_appliance_locks": {}, + "compute_instance_backup_properties": { + "description": "description_value", + "tags": {"items": ["items_value1", "items_value2"]}, + "machine_type": "machine_type_value", + "can_ip_forward": True, + "network_interface": [ + { + "network": "network_value", + "subnetwork": "subnetwork_value", + "ip_address": "ip_address_value", + "ipv6_address": "ipv6_address_value", + "internal_ipv6_prefix_length": 2831, + "name": "name_value", + "access_configs": [ + { + "type_": 1, + "name": "name_value", + 
"external_ip": "external_ip_value", + "external_ipv6": "external_ipv6_value", + "external_ipv6_prefix_length": 2837, + "set_public_ptr": True, + "public_ptr_domain_name": "public_ptr_domain_name_value", + "network_tier": 1, + } + ], + "ipv6_access_configs": {}, + "alias_ip_ranges": [ + { + "ip_cidr_range": "ip_cidr_range_value", + "subnetwork_range_name": "subnetwork_range_name_value", + } + ], + "stack_type": 1, + "ipv6_access_type": 1, + "queue_count": 1197, + "nic_type": 1, + "network_attachment": "network_attachment_value", + } + ], + "disk": [ + { + "initialize_params": { + "disk_name": "disk_name_value", + "replica_zones": [ + "replica_zones_value1", + "replica_zones_value2", + ], + }, + "device_name": "device_name_value", + "kind": "kind_value", + "disk_type_deprecated": 1, + "mode": 1, + "source": "source_value", + "index": 536, + "boot": True, + "auto_delete": True, + "license_": ["license__value1", "license__value2"], + "disk_interface": 1, + "guest_os_feature": [{"type_": 1}], + "disk_encryption_key": { + "raw_key": "raw_key_value", + "rsa_encrypted_key": "rsa_encrypted_key_value", + "kms_key_name": "kms_key_name_value", + "kms_key_service_account": "kms_key_service_account_value", + }, + "disk_size_gb": 1261, + "saved_state": 1, + "disk_type": "disk_type_value", + "type_": 1, + } + ], + "metadata": {"items": [{"key": "key_value", "value": "value_value"}]}, + "service_account": [ + {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} + ], + "scheduling": { + "on_host_maintenance": 1, + "automatic_restart": True, + "preemptible": True, + "node_affinities": [ + { + "key": "key_value", + "operator": 1, + "values": ["values_value1", "values_value2"], + } + ], + "min_node_cpus": 1379, + "provisioning_model": 1, + "instance_termination_action": 1, + "local_ssd_recovery_timeout": {"seconds": 751, "nanos": 543}, + }, + "guest_accelerator": [ + { + "accelerator_type": "accelerator_type_value", + "accelerator_count": 1805, + } + ], + 
"min_cpu_platform": "min_cpu_platform_value", + "key_revocation_action_type": 1, + "source_instance": "source_instance_value", + "labels": {}, + }, + "backup_appliance_backup_properties": { + "generation_id": 1368, + "finalize_time": {}, + "recovery_range_start_time": {}, + "recovery_range_end_time": {}, + }, + "backup_type": 1, + "gcp_backup_plan_info": { + "backup_plan": "backup_plan_value", + "backup_plan_rule_id": "backup_plan_rule_id_value", + }, + "resource_size_bytes": 2056, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupvault.UpdateBackupRequest.meta.fields["backup"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup"][field])): + del request_init["backup"][field][i][subfield] + else: + del 
request_init["backup"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_backup(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + + request = {} + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_backup_rest_required_fields( + request_type=backupvault.UpdateBackupRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_backup_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "backup", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_backup_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, 
"_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_update_backup" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_update_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.UpdateBackupRequest.pb( + backupvault.UpdateBackupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.UpdateBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_backup_rest_bad_request( + transport: str = "rest", request_type=backupvault.UpdateBackupRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_backup(request) + + +def test_update_backup_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + } + + # get truthy value for each flattened field + mock_args = dict( + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{backup.name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" + % client.transport._host, + args[1], + ) + + +def test_update_backup_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup( + backupvault.UpdateBackupRequest(), + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_backup_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.DeleteBackupRequest, + dict, + ], +) +def test_delete_backup_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_backup(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + + request = {} + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. 
        assert mock_rpc.call_count == 1

        # Operation methods build a cached wrapper on first rpc call
        # subsequent calls should use the cached wrapper
        wrapper_fn.reset_mock()

        client.delete_backup(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


def test_delete_backup_rest_required_fields(
    request_type=backupvault.DeleteBackupRequest,
):
    """Verify DeleteBackup's required `name` field survives transcoding and
    that only the default query params are sent over REST."""
    transport_class = transports.BackupDRRestTransport

    request_init = {}
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
        json_format.MessageToJson(pb_request, use_integers_for_enums=False)
    )

    # verify fields with default values are dropped

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).delete_backup._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = "name_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).delete_backup._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == "name_value"

    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = operations_pb2.Operation(name="operations/spam")
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "delete",
                "query_params": pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.delete_backup(request)

            expected_params = [("$alt", "json;enum-encoding=int")]
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params


def test_delete_backup_rest_unset_required_fields():
    """Check which DeleteBackup fields are optional vs. required for REST."""
    # NOTE(review): AnonymousCredentials is passed as the class here (no
    # parens), unlike the instantiated form used elsewhere — looks like a
    # generator quirk; confirm the transport accepts a credentials class.
    transport = transports.BackupDRRestTransport(
        credentials=ga_credentials.AnonymousCredentials
    )

    unset_fields = transport.delete_backup._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId",)) & set(("name",)))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_backup_rest_interceptors(null_interceptor):
    """Ensure pre/post DeleteBackup interceptors run exactly once per call."""
    transport = transports.BackupDRRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(),
    )
    client = BackupDRClient(transport=transport)
    with mock.patch.object(
        type(client.transport._session), "request"
    ) as req, mock.patch.object(
        path_template, "transcode"
    ) as transcode, mock.patch.object(
        operation.Operation, "_set_result_from_operation"
    ), mock.patch.object(
        transports.BackupDRRestInterceptor, "post_delete_backup"
    ) as post, mock.patch.object(
        transports.BackupDRRestInterceptor, "pre_delete_backup"
    ) as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = backupvault.DeleteBackupRequest.pb(
            backupvault.DeleteBackupRequest()
        )
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = json_format.MessageToJson(
            operations_pb2.Operation()
        )

        request = backupvault.DeleteBackupRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = operations_pb2.Operation()

        client.delete_backup(
            request,
            metadata=[
                ("key", "val"),
                ("cephalopod", "squid"),
            ],
        )

        pre.assert_called_once()
        post.assert_called_once()


def test_delete_backup_rest_bad_request(
    transport: str = "rest", request_type=backupvault.DeleteBackupRequest
):
    """A 400 HTTP response surfaces as core_exceptions.BadRequest."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {
        "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5"
    }
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.delete_backup(request)


def test_delete_backup_rest_flattened():
    """Flattened args must transcode to the expected DeleteBackup URI."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name="operations/spam")

        # get arguments that satisfy an http rule for this method
        sample_request = {
            "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5"
        }

        # get truthy value for each flattened field
        mock_args = dict(
            name="name_value",
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        client.delete_backup(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}"
            % client.transport._host,
            args[1],
        )


def test_delete_backup_rest_flattened_error(transport: str = "rest"):
    """Passing both a request object and flattened fields raises ValueError."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.delete_backup(
            backupvault.DeleteBackupRequest(),
            name="name_value",
        )


def test_delete_backup_rest_error():
    # Smoke test: constructing a REST client must not raise. No call is made.
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )


@pytest.mark.parametrize(
    "request_type",
    [
        backupvault.RestoreBackupRequest,
        dict,
    ],
)
def test_restore_backup_rest(request_type):
    """RestoreBackup over REST returns the mocked long-running Operation."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {
        "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5"
    }
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name="operations/spam")

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)

        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.restore_backup(request)

    # Establish that the response is the type that we expect.
    assert response.operation.name == "operations/spam"


def test_restore_backup_rest_use_cached_wrapped_rpc():
    """The wrapped RestoreBackup RPC is built once and reused from the cache."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = BackupDRClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.restore_backup in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = (
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
        client._transport._wrapped_methods[client._transport.restore_backup] = mock_rpc

        request = {}
        client.restore_backup(request)

        # Establish that the underlying stub method was called.
        assert mock_rpc.call_count == 1

        # Operation methods build a cached wrapper on first rpc call
        # subsequent calls should use the cached wrapper
        wrapper_fn.reset_mock()

        client.restore_backup(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


def test_restore_backup_rest_required_fields(
    request_type=backupvault.RestoreBackupRequest,
):
    """Verify RestoreBackup's required `name` field survives transcoding."""
    transport_class = transports.BackupDRRestTransport

    request_init = {}
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
        json_format.MessageToJson(pb_request, use_integers_for_enums=False)
    )

    # verify fields with default values are dropped

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).restore_backup._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = "name_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).restore_backup._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == "name_value"

    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = operations_pb2.Operation(name="operations/spam")
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": pb_request,
            }
            transcode_result["body"] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.restore_backup(request)

            expected_params = [("$alt", "json;enum-encoding=int")]
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params


def test_restore_backup_rest_unset_required_fields():
    """Check which RestoreBackup fields are optional vs. required for REST."""
    # NOTE(review): AnonymousCredentials passed as a class, not an instance —
    # generator quirk repeated across these tests; confirm intended.
    transport = transports.BackupDRRestTransport(
        credentials=ga_credentials.AnonymousCredentials
    )

    unset_fields = transport.restore_backup._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("name",)))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_restore_backup_rest_interceptors(null_interceptor):
    """Ensure pre/post RestoreBackup interceptors run exactly once per call."""
    transport = transports.BackupDRRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(),
    )
    client = BackupDRClient(transport=transport)
    with mock.patch.object(
        type(client.transport._session), "request"
    ) as req, mock.patch.object(
        path_template, "transcode"
    ) as transcode, mock.patch.object(
        operation.Operation, "_set_result_from_operation"
    ), mock.patch.object(
        transports.BackupDRRestInterceptor, "post_restore_backup"
    ) as post, mock.patch.object(
        transports.BackupDRRestInterceptor, "pre_restore_backup"
    ) as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = backupvault.RestoreBackupRequest.pb(
            backupvault.RestoreBackupRequest()
        )
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = json_format.MessageToJson(
            operations_pb2.Operation()
        )

        request = backupvault.RestoreBackupRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = operations_pb2.Operation()

        client.restore_backup(
            request,
            metadata=[
                ("key", "val"),
                ("cephalopod", "squid"),
            ],
        )

        pre.assert_called_once()
        post.assert_called_once()


def test_restore_backup_rest_bad_request(
    transport: str = "rest", request_type=backupvault.RestoreBackupRequest
):
    """A 400 HTTP response surfaces as core_exceptions.BadRequest."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {
        "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5"
    }
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.restore_backup(request)


def test_restore_backup_rest_flattened():
    """Flattened args must transcode to the :restore URI for RestoreBackup."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name="operations/spam")

        # get arguments that satisfy an http rule for this method
        sample_request = {
            "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5"
        }

        # get truthy value for each flattened field
        mock_args = dict(
            name="name_value",
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        client.restore_backup(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}:restore"
            % client.transport._host,
            args[1],
        )


def test_restore_backup_rest_flattened_error(transport: str = "rest"):
    """Passing both a request object and flattened fields raises ValueError."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.restore_backup(
            backupvault.RestoreBackupRequest(),
            name="name_value",
        )


def test_restore_backup_rest_error():
    # Smoke test: constructing a REST client must not raise. No call is made.
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )


@pytest.mark.parametrize(
    "request_type",
    [
        backupplan.CreateBackupPlanRequest,
        dict,
    ],
)
def test_create_backup_plan_rest(request_type):
    """CreateBackupPlan over REST: a fully populated body transcodes and the
    mocked long-running Operation is returned."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"parent": "projects/sample1/locations/sample2"}
    request_init["backup_plan"] = {
        "name": "name_value",
        "description": "description_value",
        "labels": {},
        "create_time": {"seconds": 751, "nanos": 543},
        "update_time": {},
        "backup_rules": [
            {
                "rule_id": "rule_id_value",
                "backup_retention_days": 2237,
                "standard_schedule": {
                    "recurrence_type": 1,
                    "hourly_frequency": 1748,
                    "days_of_week": [1],
                    "days_of_month": [1387, 1388],
                    "week_day_of_month": {"week_of_month": 1, "day_of_week": 1},
                    "months": [1],
                    "backup_window": {
                        "start_hour_of_day": 1820,
                        "end_hour_of_day": 1573,
                    },
                    "time_zone": "time_zone_value",
                },
            }
        ],
        "state": 1,
        "resource_type": "resource_type_value",
        "etag": "etag_value",
        "backup_vault": "backup_vault_value",
        "backup_vault_service_account": "backup_vault_service_account_value",
    }
    # The version of a generated dependency at test runtime may differ from the version used during generation.
    # Delete any fields which are not present in the current runtime dependency
    # See https://github.com/googleapis/gapic-generator-python/issues/1748

    # Determine if the message type is proto-plus or protobuf
    test_field = backupplan.CreateBackupPlanRequest.meta.fields["backup_plan"]

    def get_message_fields(field):
        # Given a field which is a message (composite type), return a list with
        # all the fields of the message.
        # If the field is not a composite type, return an empty list.
        message_fields = []

        if hasattr(field, "message") and field.message:
            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")

            if is_field_type_proto_plus_type:
                message_fields = field.message.meta.fields.values()
            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
            else:  # pragma: NO COVER
                message_fields = field.message.DESCRIPTOR.fields
        return message_fields

    runtime_nested_fields = [
        (field.name, nested_field.name)
        for field in get_message_fields(test_field)
        for nested_field in get_message_fields(field)
    ]

    subfields_not_in_runtime = []

    # For each item in the sample request, create a list of sub fields which are not present at runtime
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for field, value in request_init["backup_plan"].items():  # pragma: NO COVER
        result = None
        is_repeated = False
        # For repeated fields
        if isinstance(value, list) and len(value):
            is_repeated = True
            result = value[0]
        # For fields where the type is another message
        if isinstance(value, dict):
            result = value

        if result and hasattr(result, "keys"):
            for subfield in result.keys():
                if (field, subfield) not in runtime_nested_fields:
                    subfields_not_in_runtime.append(
                        {
                            "field": field,
                            "subfield": subfield,
                            "is_repeated": is_repeated,
                        }
                    )

    # Remove fields from the sample request which are not present in the runtime version of the dependency
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for subfield_to_delete in subfields_not_in_runtime:  # pragma: NO COVER
        field = subfield_to_delete.get("field")
        field_repeated = subfield_to_delete.get("is_repeated")
        subfield = subfield_to_delete.get("subfield")
        if subfield:
            if field_repeated:
                for i in range(0, len(request_init["backup_plan"][field])):
                    del request_init["backup_plan"][field][i][subfield]
            else:
                del request_init["backup_plan"][field][subfield]
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name="operations/spam")

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)

        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.create_backup_plan(request)

    # Establish that the response is the type that we expect.
    assert response.operation.name == "operations/spam"


def test_create_backup_plan_rest_use_cached_wrapped_rpc():
    """The wrapped CreateBackupPlan RPC is built once and reused from the cache."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = BackupDRClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert (
            client._transport.create_backup_plan in client._transport._wrapped_methods
        )

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = (
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
        client._transport._wrapped_methods[
            client._transport.create_backup_plan
        ] = mock_rpc

        request = {}
        client.create_backup_plan(request)

        # Establish that the underlying stub method was called.
        assert mock_rpc.call_count == 1

        # Operation methods build a cached wrapper on first rpc call
        # subsequent calls should use the cached wrapper
        wrapper_fn.reset_mock()

        client.create_backup_plan(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


def test_create_backup_plan_rest_required_fields(
    request_type=backupplan.CreateBackupPlanRequest,
):
    """Verify CreateBackupPlan's required `parent`/`backup_plan_id` fields
    survive transcoding and that backupPlanId is sent as a query param."""
    transport_class = transports.BackupDRRestTransport

    request_init = {}
    request_init["parent"] = ""
    request_init["backup_plan_id"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
        json_format.MessageToJson(pb_request, use_integers_for_enums=False)
    )

    # verify fields with default values are dropped
    assert "backupPlanId" not in jsonified_request

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).create_backup_plan._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    assert "backupPlanId" in jsonified_request
    assert jsonified_request["backupPlanId"] == request_init["backup_plan_id"]

    jsonified_request["parent"] = "parent_value"
    jsonified_request["backupPlanId"] = "backup_plan_id_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).create_backup_plan._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(
        (
            "backup_plan_id",
            "request_id",
        )
    )
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == "parent_value"
    assert "backupPlanId" in jsonified_request
    assert jsonified_request["backupPlanId"] == "backup_plan_id_value"

    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = operations_pb2.Operation(name="operations/spam")
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": pb_request,
            }
            transcode_result["body"] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.create_backup_plan(request)

            expected_params = [
                (
                    "backupPlanId",
                    "",
                ),
                ("$alt", "json;enum-encoding=int"),
            ]
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params


def test_create_backup_plan_rest_unset_required_fields():
    """Check which CreateBackupPlan fields are optional vs. required for REST."""
    # NOTE(review): AnonymousCredentials passed as a class, not an instance —
    # generator quirk repeated across these tests; confirm intended.
    transport = transports.BackupDRRestTransport(
        credentials=ga_credentials.AnonymousCredentials
    )

    unset_fields = transport.create_backup_plan._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(
            (
                "backupPlanId",
                "requestId",
            )
        )
        & set(
            (
                "parent",
                "backupPlanId",
                "backupPlan",
            )
        )
    )


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_create_backup_plan_rest_interceptors(null_interceptor):
    """Ensure pre/post CreateBackupPlan interceptors run exactly once per call."""
    transport = transports.BackupDRRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(),
    )
    client = BackupDRClient(transport=transport)
    with mock.patch.object(
        type(client.transport._session), "request"
    ) as req, mock.patch.object(
        path_template, "transcode"
    ) as transcode, mock.patch.object(
        operation.Operation, "_set_result_from_operation"
    ), mock.patch.object(
        transports.BackupDRRestInterceptor, "post_create_backup_plan"
    ) as post, mock.patch.object(
        transports.BackupDRRestInterceptor, "pre_create_backup_plan"
    ) as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = backupplan.CreateBackupPlanRequest.pb(
            backupplan.CreateBackupPlanRequest()
        )
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = json_format.MessageToJson(
            operations_pb2.Operation()
        )

        request = backupplan.CreateBackupPlanRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = operations_pb2.Operation()

        client.create_backup_plan(
            request,
            metadata=[
                ("key", "val"),
                ("cephalopod", "squid"),
            ],
        )

        pre.assert_called_once()
        post.assert_called_once()


def test_create_backup_plan_rest_bad_request(
    transport: str = "rest", request_type=backupplan.CreateBackupPlanRequest
):
    """A 400 HTTP response surfaces as core_exceptions.BadRequest."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {"parent": "projects/sample1/locations/sample2"}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.create_backup_plan(request)


def test_create_backup_plan_rest_flattened():
    """Flattened args must transcode to the backupPlans collection URI."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name="operations/spam")

        # get arguments that satisfy an http rule for this method
        sample_request = {"parent": "projects/sample1/locations/sample2"}

        # get truthy value for each flattened field
        mock_args = dict(
            parent="parent_value",
            backup_plan=backupplan.BackupPlan(name="name_value"),
            backup_plan_id="backup_plan_id_value",
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        client.create_backup_plan(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "%s/v1/{parent=projects/*/locations/*}/backupPlans"
            % client.transport._host,
            args[1],
        )


def test_create_backup_plan_rest_flattened_error(transport: str = "rest"):
    """Passing both a request object and flattened fields raises ValueError."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.create_backup_plan(
            backupplan.CreateBackupPlanRequest(),
            parent="parent_value",
            backup_plan=backupplan.BackupPlan(name="name_value"),
            backup_plan_id="backup_plan_id_value",
        )


def test_create_backup_plan_rest_error():
    # Smoke test: constructing a REST client must not raise. No call is made.
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )


@pytest.mark.parametrize(
    "request_type",
    [
        backupplan.GetBackupPlanRequest,
        dict,
    ],
)
def test_get_backup_plan_rest(request_type):
    """GetBackupPlan over REST deserializes the mocked BackupPlan response."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = backupplan.BackupPlan(
            name="name_value",
            description="description_value",
            state=backupplan.BackupPlan.State.CREATING,
            resource_type="resource_type_value",
            etag="etag_value",
            backup_vault="backup_vault_value",
            backup_vault_service_account="backup_vault_service_account_value",
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = backupplan.BackupPlan.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)

        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.get_backup_plan(request)

    # Establish that the response is the type that we expect.
+ assert isinstance(response, backupplan.BackupPlan) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == backupplan.BackupPlan.State.CREATING + assert response.resource_type == "resource_type_value" + assert response.etag == "etag_value" + assert response.backup_vault == "backup_vault_value" + assert response.backup_vault_service_account == "backup_vault_service_account_value" + + +def test_get_backup_plan_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup_plan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup_plan] = mock_rpc + + request = {} + client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_plan_rest_required_fields( + request_type=backupplan.GetBackupPlanRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupplan.BackupPlan() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
        # (continuation: fake the transcode result and HTTP response, then
        # assert the default query params were sent)
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = backupplan.BackupPlan.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.get_backup_plan(request)

            expected_params = [("$alt", "json;enum-encoding=int")]
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params


def test_get_backup_plan_rest_unset_required_fields():
    """Verify GetBackupPlan reports ``name`` as its only required field and
    declares no optional query parameters."""
    transport = transports.BackupDRRestTransport(
        credentials=ga_credentials.AnonymousCredentials
    )

    unset_fields = transport.get_backup_plan._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("name",)))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_backup_plan_rest_interceptors(null_interceptor):
    """Verify the REST interceptor's pre/post hooks are invoked exactly once
    around a get_backup_plan call (with and without a configured interceptor)."""
    transport = transports.BackupDRRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(),
    )
    client = BackupDRClient(transport=transport)
    with mock.patch.object(
        type(client.transport._session), "request"
    ) as req, mock.patch.object(
        path_template, "transcode"
    ) as transcode, mock.patch.object(
        transports.BackupDRRestInterceptor, "post_get_backup_plan"
    ) as post, mock.patch.object(
        transports.BackupDRRestInterceptor, "pre_get_backup_plan"
    ) as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = backupplan.GetBackupPlanRequest.pb(
            backupplan.GetBackupPlanRequest()
        )
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = backupplan.BackupPlan.to_json(
            backupplan.BackupPlan()
        )

        request = backupplan.GetBackupPlanRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = backupplan.BackupPlan()

        client.get_backup_plan(
            request,
            metadata=[
                ("key", "val"),
                ("cephalopod", "squid"),
            ],
        )

        pre.assert_called_once()
        post.assert_called_once()


def test_get_backup_plan_rest_bad_request(
    transport: str = "rest", request_type=backupplan.GetBackupPlanRequest
):
    """Verify an HTTP 400 from the server surfaces as core_exceptions.BadRequest."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_backup_plan(request)


def test_get_backup_plan_rest_flattened():
    """Verify calling get_backup_plan with flattened keyword args produces a
    request hitting the expected v1 URL template."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        # (continuation: build the fake response and assert the request URL)
        return_value = backupplan.BackupPlan()

        # get arguments that satisfy an http rule for this method
        sample_request = {
            "name": "projects/sample1/locations/sample2/backupPlans/sample3"
        }

        # get truthy value for each flattened field
        mock_args = dict(
            name="name_value",
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = backupplan.BackupPlan.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        client.get_backup_plan(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "%s/v1/{name=projects/*/locations/*/backupPlans/*}"
            % client.transport._host,
            args[1],
        )


def test_get_backup_plan_rest_flattened_error(transport: str = "rest"):
    """Verify passing both a request object and flattened fields raises ValueError."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_backup_plan(
            backupplan.GetBackupPlanRequest(),
            name="name_value",
        )


def test_get_backup_plan_rest_error():
    """Generated placeholder: constructing a REST client must not raise."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )


@pytest.mark.parametrize(
    "request_type",
    [
        backupplan.ListBackupPlansRequest,
        dict,
    ],
)
def test_list_backup_plans_rest(request_type):
    """Verify list_backup_plans returns a pager exposing the response's
    next_page_token and unreachable fields (proto and dict request forms)."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"parent": "projects/sample1/locations/sample2"}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = backupplan.ListBackupPlansResponse(
            next_page_token="next_page_token_value",
            unreachable=["unreachable_value"],
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = backupplan.ListBackupPlansResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)

        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.list_backup_plans(request)

        # Establish that the response is the type that we expect.
        # (continuation: response-type and field assertions)
        assert isinstance(response, pagers.ListBackupPlansPager)
        assert response.next_page_token == "next_page_token_value"
        assert response.unreachable == ["unreachable_value"]


def test_list_backup_plans_rest_use_cached_wrapped_rpc():
    """Verify list_backup_plans reuses the wrapped RPC cached in
    ``_transport._wrapped_methods`` instead of re-wrapping on every call."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = BackupDRClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.list_backup_plans in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = (
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
        client._transport._wrapped_methods[
            client._transport.list_backup_plans
        ] = mock_rpc

        request = {}
        client.list_backup_plans(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.list_backup_plans(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


def test_list_backup_plans_rest_required_fields(
    request_type=backupplan.ListBackupPlansRequest,
):
    """Verify the REST transport's handling of ListBackupPlans' required
    ``parent`` field and its optional paging/filter query parameters."""
    transport_class = transports.BackupDRRestTransport

    request_init = {}
    request_init["parent"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
        json_format.MessageToJson(pb_request, use_integers_for_enums=False)
    )

    # verify fields with default values are dropped

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).list_backup_plans._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["parent"] = "parent_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).list_backup_plans._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(
        (
            "filter",
            "order_by",
            "page_size",
            "page_token",
        )
    )
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == "parent_value"

    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = backupplan.ListBackupPlansResponse()
    # Mock the http request call within the method and fake a response.
    # (continuation: fake the transcode result and HTTP response, then
    # assert the default query params were sent)
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = backupplan.ListBackupPlansResponse.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.list_backup_plans(request)

            expected_params = [("$alt", "json;enum-encoding=int")]
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params


def test_list_backup_plans_rest_unset_required_fields():
    """Verify ListBackupPlans declares ``parent`` required and exposes the
    filter/orderBy/pageSize/pageToken query parameters."""
    transport = transports.BackupDRRestTransport(
        credentials=ga_credentials.AnonymousCredentials
    )

    unset_fields = transport.list_backup_plans._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(
            (
                "filter",
                "orderBy",
                "pageSize",
                "pageToken",
            )
        )
        & set(("parent",))
    )


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_backup_plans_rest_interceptors(null_interceptor):
    """Verify the REST interceptor's pre/post hooks are invoked exactly once
    around a list_backup_plans call (with and without a configured interceptor)."""
    transport = transports.BackupDRRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(),
    )
    client = BackupDRClient(transport=transport)
    with mock.patch.object(
        type(client.transport._session), "request"
    ) as req, mock.patch.object(
        path_template, "transcode"
    ) as transcode, mock.patch.object(
        transports.BackupDRRestInterceptor, "post_list_backup_plans"
    ) as post, mock.patch.object(
        transports.BackupDRRestInterceptor, "pre_list_backup_plans"
    ) as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = backupplan.ListBackupPlansRequest.pb(
            backupplan.ListBackupPlansRequest()
        )
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = backupplan.ListBackupPlansResponse.to_json(
            backupplan.ListBackupPlansResponse()
        )

        request = backupplan.ListBackupPlansRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = backupplan.ListBackupPlansResponse()

        client.list_backup_plans(
            request,
            metadata=[
                ("key", "val"),
                ("cephalopod", "squid"),
            ],
        )

        pre.assert_called_once()
        post.assert_called_once()


def test_list_backup_plans_rest_bad_request(
    transport: str = "rest", request_type=backupplan.ListBackupPlansRequest
):
    """Verify an HTTP 400 from the server surfaces as core_exceptions.BadRequest."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {"parent": "projects/sample1/locations/sample2"}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    # (continuation: fake a 400 response and expect BadRequest)
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.list_backup_plans(request)


def test_list_backup_plans_rest_flattened():
    """Verify calling list_backup_plans with flattened keyword args produces a
    request hitting the expected v1 URL template."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = backupplan.ListBackupPlansResponse()

        # get arguments that satisfy an http rule for this method
        sample_request = {"parent": "projects/sample1/locations/sample2"}

        # get truthy value for each flattened field
        mock_args = dict(
            parent="parent_value",
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = backupplan.ListBackupPlansResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        client.list_backup_plans(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "%s/v1/{parent=projects/*/locations/*}/backupPlans"
            % client.transport._host,
            args[1],
        )


def test_list_backup_plans_rest_flattened_error(transport: str = "rest"):
    """Verify passing both a request object and flattened fields raises ValueError."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_backup_plans(
            backupplan.ListBackupPlansRequest(),
            parent="parent_value",
        )


def test_list_backup_plans_rest_pager(transport: str = "rest"):
    """Verify the ListBackupPlans pager walks a four-page fake response set,
    yielding all items and exposing each page's next_page_token."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        # with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            backupplan.ListBackupPlansResponse(
                backup_plans=[
                    backupplan.BackupPlan(),
                    backupplan.BackupPlan(),
                    backupplan.BackupPlan(),
                ],
                next_page_token="abc",
            ),
            backupplan.ListBackupPlansResponse(
                backup_plans=[],
                next_page_token="def",
            ),
            backupplan.ListBackupPlansResponse(
                backup_plans=[
                    backupplan.BackupPlan(),
                ],
                next_page_token="ghi",
            ),
            backupplan.ListBackupPlansResponse(
                backup_plans=[
                    backupplan.BackupPlan(),
                    backupplan.BackupPlan(),
                ],
            ),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(
            backupplan.ListBackupPlansResponse.to_json(x) for x in response
        )
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode("UTF-8")
            # (continuation: finish wiring the paged responses, then iterate)
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {"parent": "projects/sample1/locations/sample2"}

        pager = client.list_backup_plans(request=sample_request)

        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, backupplan.BackupPlan) for i in results)

        pages = list(client.list_backup_plans(request=sample_request).pages)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token


@pytest.mark.parametrize(
    "request_type",
    [
        backupplan.DeleteBackupPlanRequest,
        dict,
    ],
)
def test_delete_backup_plan_rest(request_type):
    """Verify delete_backup_plan returns the long-running Operation faked by
    the mocked HTTP session (proto and dict request forms)."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name="operations/spam")

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)

        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.delete_backup_plan(request)

        # Establish that the response is the type that we expect.
        assert response.operation.name == "operations/spam"


def test_delete_backup_plan_rest_use_cached_wrapped_rpc():
    """Verify delete_backup_plan reuses the wrapped RPC cached in
    ``_transport._wrapped_methods`` instead of re-wrapping on every call."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = BackupDRClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert (
            client._transport.delete_backup_plan in client._transport._wrapped_methods
        )

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = (
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
        client._transport._wrapped_methods[
            client._transport.delete_backup_plan
        ] = mock_rpc

        request = {}
        client.delete_backup_plan(request)

        # Establish that the underlying gRPC stub method was called.
        # (continuation: second call must hit the cached wrapper, not re-wrap)
        assert mock_rpc.call_count == 1

        # Operation methods build a cached wrapper on first rpc call
        # subsequent calls should use the cached wrapper
        wrapper_fn.reset_mock()

        client.delete_backup_plan(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


def test_delete_backup_plan_rest_required_fields(
    request_type=backupplan.DeleteBackupPlanRequest,
):
    """Verify the REST transport's handling of DeleteBackupPlan's required
    ``name`` field and optional ``request_id`` query parameter."""
    transport_class = transports.BackupDRRestTransport

    request_init = {}
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
        json_format.MessageToJson(pb_request, use_integers_for_enums=False)
    )

    # verify fields with default values are dropped

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).delete_backup_plan._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = "name_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).delete_backup_plan._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == "name_value"

    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = operations_pb2.Operation(name="operations/spam")
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "delete",
                "query_params": pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.delete_backup_plan(request)

            expected_params = [("$alt", "json;enum-encoding=int")]
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params


def test_delete_backup_plan_rest_unset_required_fields():
    """Verify DeleteBackupPlan declares ``name`` required and ``requestId``
    as its only optional query parameter."""
    transport = transports.BackupDRRestTransport(
        credentials=ga_credentials.AnonymousCredentials
    )

    unset_fields = transport.delete_backup_plan._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId",)) & set(("name",)))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_backup_plan_rest_interceptors(null_interceptor):
    """Verify the REST interceptor's pre/post hooks are invoked exactly once
    around a delete_backup_plan call (with and without a configured interceptor)."""
    transport = transports.BackupDRRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(),
    )
    client = BackupDRClient(transport=transport)
    with mock.patch.object(
        type(client.transport._session), "request"
    ) as req, mock.patch.object(
        path_template, "transcode"
    ) as transcode, mock.patch.object(
        operation.Operation, "_set_result_from_operation"
    ), mock.patch.object(
        transports.BackupDRRestInterceptor,
"post_delete_backup_plan" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_delete_backup_plan" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupplan.DeleteBackupPlanRequest.pb( + backupplan.DeleteBackupPlanRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupplan.DeleteBackupPlanRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_backup_plan( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_backup_plan_rest_bad_request( + transport: str = "rest", request_type=backupplan.DeleteBackupPlanRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_backup_plan(request) + + +def test_delete_backup_plan_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlans/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_backup_plan(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupPlans/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backup_plan_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
    # (continuation: the ValueError assertion for the flattened-error test)
    with pytest.raises(ValueError):
        client.delete_backup_plan(
            backupplan.DeleteBackupPlanRequest(),
            name="name_value",
        )


def test_delete_backup_plan_rest_error():
    """Generated placeholder: constructing a REST client must not raise."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )


@pytest.mark.parametrize(
    "request_type",
    [
        backupplanassociation.CreateBackupPlanAssociationRequest,
        dict,
    ],
)
def test_create_backup_plan_association_rest(request_type):
    """Verify create_backup_plan_association returns the faked long-running
    Operation; the sample request body is first pruned of any subfields the
    runtime proto dependency does not define."""
    client = BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"parent": "projects/sample1/locations/sample2"}
    request_init["backup_plan_association"] = {
        "name": "name_value",
        "resource_type": "resource_type_value",
        "resource": "resource_value",
        "backup_plan": "backup_plan_value",
        "create_time": {"seconds": 751, "nanos": 543},
        "update_time": {},
        "state": 1,
        "rules_config_info": [
            {
                "rule_id": "rule_id_value",
                "last_backup_state": 1,
                "last_backup_error": {
                    "code": 411,
                    "message": "message_value",
                    "details": [
                        {
                            "type_url": "type.googleapis.com/google.protobuf.Duration",
                            "value": b"\x08\x0c\x10\xdb\x07",
                        }
                    ],
                },
                "last_successful_backup_consistency_time": {},
            }
        ],
        "data_source": "data_source_value",
    }
    # The version of a generated dependency at test runtime may differ from the version used during generation.
    # Delete any fields which are not present in the current runtime dependency
    # See https://github.com/googleapis/gapic-generator-python/issues/1748

    # Determine if the message type is proto-plus or protobuf
    test_field = backupplanassociation.CreateBackupPlanAssociationRequest.meta.fields[
        "backup_plan_association"
    ]

    def get_message_fields(field):
        # Given a field which is a message (composite type), return a list with
        # all the fields of the message.
        # If the field is not a composite type, return an empty list.
        message_fields = []

        if hasattr(field, "message") and field.message:
            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")

            if is_field_type_proto_plus_type:
                message_fields = field.message.meta.fields.values()
            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
            else:  # pragma: NO COVER
                message_fields = field.message.DESCRIPTOR.fields
        return message_fields

    runtime_nested_fields = [
        (field.name, nested_field.name)
        for field in get_message_fields(test_field)
        for nested_field in get_message_fields(field)
    ]

    subfields_not_in_runtime = []

    # For each item in the sample request, create a list of sub fields which are not present at runtime
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for field, value in request_init[
        "backup_plan_association"
    ].items():  # pragma: NO COVER
        result = None
        is_repeated = False
        # For repeated fields
        if isinstance(value, list) and len(value):
            is_repeated = True
            result = value[0]
        # For fields where the type is another message
        if isinstance(value, dict):
            result = value

        if result and hasattr(result, "keys"):
            for subfield in result.keys():
                if (field, subfield) not in runtime_nested_fields:
                    subfields_not_in_runtime.append(
                        {
                            "field": field,
                            "subfield": subfield,
                            "is_repeated": is_repeated,
                        }
                    )

    # Remove fields from the sample request which are not present in the runtime version of the dependency
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for subfield_to_delete in subfields_not_in_runtime:  # pragma: NO COVER
        field = subfield_to_delete.get("field")
        field_repeated = subfield_to_delete.get("is_repeated")
        subfield = subfield_to_delete.get("subfield")
        if subfield:
            if field_repeated:
                for i in range(0, len(request_init["backup_plan_association"][field])):
                    del request_init["backup_plan_association"][field][i][subfield]
            else:
                del request_init["backup_plan_association"][field][subfield]
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name="operations/spam")

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)

        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.create_backup_plan_association(request)

        # Establish that the response is the type that we expect.
        assert response.operation.name == "operations/spam"


def test_create_backup_plan_association_rest_use_cached_wrapped_rpc():
    """Verify create_backup_plan_association reuses the wrapped RPC cached in
    ``_transport._wrapped_methods`` instead of re-wrapping on every call."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = BackupDRClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert (
            client._transport.create_backup_plan_association
            in client._transport._wrapped_methods
        )

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = (
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
        client._transport._wrapped_methods[
            client._transport.create_backup_plan_association
        ] = mock_rpc

        request = {}
        client.create_backup_plan_association(request)

        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_backup_plan_association_rest_required_fields( + request_type=backupplanassociation.CreateBackupPlanAssociationRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_plan_association_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "backupPlanAssociationId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_plan_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "backupPlanAssociationId" in jsonified_request + assert ( + jsonified_request["backupPlanAssociationId"] + == request_init["backup_plan_association_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["backupPlanAssociationId"] = "backup_plan_association_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_plan_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "backup_plan_association_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupPlanAssociationId" in jsonified_request + assert ( + jsonified_request["backupPlanAssociationId"] + == "backup_plan_association_id_value" + ) + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_backup_plan_association(request) + + expected_params = [ + ( + "backupPlanAssociationId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_backup_plan_association_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_backup_plan_association._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "backupPlanAssociationId", + "requestId", + ) + ) + & set( + ( + "parent", + "backupPlanAssociationId", + "backupPlanAssociation", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_backup_plan_association_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_backup_plan_association" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_create_backup_plan_association" + ) as pre: 
+ pre.assert_not_called() + post.assert_not_called() + pb_message = backupplanassociation.CreateBackupPlanAssociationRequest.pb( + backupplanassociation.CreateBackupPlanAssociationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupplanassociation.CreateBackupPlanAssociationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_backup_plan_association( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_backup_plan_association_rest_bad_request( + transport: str = "rest", + request_type=backupplanassociation.CreateBackupPlanAssociationRequest, +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_backup_plan_association(request) + + +def test_create_backup_plan_association_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_backup_plan_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupPlanAssociations" + % client.transport._host, + args[1], + ) + + +def test_create_backup_plan_association_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup_plan_association( + backupplanassociation.CreateBackupPlanAssociationRequest(), + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + +def test_create_backup_plan_association_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.GetBackupPlanAssociationRequest, + dict, + ], +) +def test_get_backup_plan_association_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplanassociation.BackupPlanAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_backup_plan_association(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, backupplanassociation.BackupPlanAssociation) + assert response.name == "name_value" + assert response.resource_type == "resource_type_value" + assert response.resource == "resource_value" + assert response.backup_plan == "backup_plan_value" + assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING + assert response.data_source == "data_source_value" + + +def test_get_backup_plan_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute 
client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_backup_plan_association + ] = mock_rpc + + request = {} + client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_plan_association_rest_required_fields( + request_type=backupplanassociation.GetBackupPlanAssociationRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupplanassociation.BackupPlanAssociation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupplanassociation.BackupPlanAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_backup_plan_association(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_plan_association_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup_plan_association._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_plan_association_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), ) client = BackupDRClient(transport=transport) @@ -3105,14 +21997,14 @@ def test_list_management_servers_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" 
) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_list_management_servers" + transports.BackupDRRestInterceptor, "post_get_backup_plan_association" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_list_management_servers" + transports.BackupDRRestInterceptor, "pre_get_backup_plan_association" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = backupdr.ListManagementServersRequest.pb( - backupdr.ListManagementServersRequest() + pb_message = backupplanassociation.GetBackupPlanAssociationRequest.pb( + backupplanassociation.GetBackupPlanAssociationRequest() ) transcode.return_value = { "method": "post", @@ -3124,19 +22016,19 @@ def test_list_management_servers_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = backupdr.ListManagementServersResponse.to_json( - backupdr.ListManagementServersResponse() + req.return_value._content = backupplanassociation.BackupPlanAssociation.to_json( + backupplanassociation.BackupPlanAssociation() ) - request = backupdr.ListManagementServersRequest() + request = backupplanassociation.GetBackupPlanAssociationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupdr.ListManagementServersResponse() + post.return_value = backupplanassociation.BackupPlanAssociation() - client.list_management_servers( + client.get_backup_plan_association( request, metadata=[ ("key", "val"), @@ -3148,8 +22040,9 @@ def test_list_management_servers_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_management_servers_rest_bad_request( - transport: str = "rest", request_type=backupdr.ListManagementServersRequest +def test_get_backup_plan_association_rest_bad_request( + transport: str = "rest", + request_type=backupplanassociation.GetBackupPlanAssociationRequest, ): 
client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3157,7 +22050,9 @@ def test_list_management_servers_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3169,10 +22064,10 @@ def test_list_management_servers_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_management_servers(request) + client.get_backup_plan_association(request) -def test_list_management_servers_rest_flattened(): +def test_get_backup_plan_association_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3181,14 +22076,16 @@ def test_list_management_servers_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = backupdr.ListManagementServersResponse() + return_value = backupplanassociation.BackupPlanAssociation() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -3196,159 +22093,90 @@ def test_list_management_servers_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupdr.ListManagementServersResponse.pb(return_value) + return_value = backupplanassociation.BackupPlanAssociation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_management_servers(**mock_args) + client.get_backup_plan_association(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/managementServers" + "%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}" % client.transport._host, args[1], ) -def test_list_management_servers_rest_flattened_error(transport: str = "rest"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_management_servers( - backupdr.ListManagementServersRequest(), - parent="parent_value", - ) - - -def test_list_management_servers_rest_pager(transport: str = "rest"): +def test_get_backup_plan_association_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - backupdr.ManagementServer(), - backupdr.ManagementServer(), - ], - next_page_token="abc", - ), - backupdr.ListManagementServersResponse( - management_servers=[], - next_page_token="def", - ), - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - ], - next_page_token="ghi", - ), - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - backupdr.ManagementServer(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - backupdr.ListManagementServersResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - pager = client.list_management_servers(request=sample_request) + # Attempting to call a method with both a 
request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_plan_association( + backupplanassociation.GetBackupPlanAssociationRequest(), + name="name_value", + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupdr.ManagementServer) for i in results) - pages = list(client.list_management_servers(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_get_backup_plan_association_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - backupdr.GetManagementServerRequest, + backupplanassociation.ListBackupPlanAssociationsRequest, dict, ], ) -def test_get_management_server_rest(request_type): +def test_list_backup_plan_associations_rest(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = backupdr.ManagementServer( - name="name_value", - description="description_value", - type_=backupdr.ManagementServer.InstanceType.BACKUP_RESTORE, - state=backupdr.ManagementServer.InstanceState.CREATING, - etag="etag_value", - oauth2_client_id="oauth2_client_id_value", - ba_proxy_uri=["ba_proxy_uri_value"], - satisfies_pzi=True, + return_value = backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupdr.ManagementServer.pb(return_value) + return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_management_server(request) + response = client.list_backup_plan_associations(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, backupdr.ManagementServer) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.type_ == backupdr.ManagementServer.InstanceType.BACKUP_RESTORE - assert response.state == backupdr.ManagementServer.InstanceState.CREATING - assert response.etag == "etag_value" - assert response.oauth2_client_id == "oauth2_client_id_value" - assert response.ba_proxy_uri == ["ba_proxy_uri_value"] - assert response.satisfies_pzi is True + assert isinstance(response, pagers.ListBackupPlanAssociationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_management_server_rest_use_cached_wrapped_rpc(): +def test_list_backup_plan_associations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3363,7 +22191,7 @@ def test_get_management_server_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_management_server + client._transport.list_backup_plan_associations in client._transport._wrapped_methods ) @@ -3373,29 +22201,29 @@ def test_get_management_server_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_management_server + client._transport.list_backup_plan_associations ] = mock_rpc request = {} - client.get_management_server(request) + client.list_backup_plan_associations(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_management_server(request) + client.list_backup_plan_associations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_management_server_rest_required_fields( - request_type=backupdr.GetManagementServerRequest, +def test_list_backup_plan_associations_rest_required_fields( + request_type=backupplanassociation.ListBackupPlanAssociationsRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3406,21 +22234,29 @@ def test_get_management_server_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_management_server._get_unset_required_fields(jsonified_request) + ).list_backup_plan_associations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_management_server._get_unset_required_fields(jsonified_request) + ).list_backup_plan_associations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3429,7 +22265,7 @@ def test_get_management_server_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backupdr.ManagementServer() + return_value = backupplanassociation.ListBackupPlanAssociationsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3450,30 +22286,43 @@ def test_get_management_server_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupdr.ManagementServer.pb(return_value) + return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_management_server(request) + response = client.list_backup_plan_associations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_management_server_rest_unset_required_fields(): +def test_list_backup_plan_associations_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_management_server._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & 
set(("name",))) + unset_fields = transport.list_backup_plan_associations._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_management_server_rest_interceptors(null_interceptor): +def test_list_backup_plan_associations_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -3484,14 +22333,14 @@ def test_get_management_server_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_get_management_server" + transports.BackupDRRestInterceptor, "post_list_backup_plan_associations" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_get_management_server" + transports.BackupDRRestInterceptor, "pre_list_backup_plan_associations" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = backupdr.GetManagementServerRequest.pb( - backupdr.GetManagementServerRequest() + pb_message = backupplanassociation.ListBackupPlanAssociationsRequest.pb( + backupplanassociation.ListBackupPlanAssociationsRequest() ) transcode.return_value = { "method": "post", @@ -3503,19 +22352,21 @@ def test_get_management_server_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = backupdr.ManagementServer.to_json( - backupdr.ManagementServer() + req.return_value._content = ( + backupplanassociation.ListBackupPlanAssociationsResponse.to_json( + backupplanassociation.ListBackupPlanAssociationsResponse() + ) ) - request = backupdr.GetManagementServerRequest() + request = 
backupplanassociation.ListBackupPlanAssociationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupdr.ManagementServer() + post.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() - client.get_management_server( + client.list_backup_plan_associations( request, metadata=[ ("key", "val"), @@ -3527,8 +22378,9 @@ def test_get_management_server_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_management_server_rest_bad_request( - transport: str = "rest", request_type=backupdr.GetManagementServerRequest +def test_list_backup_plan_associations_rest_bad_request( + transport: str = "rest", + request_type=backupplanassociation.ListBackupPlanAssociationsRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3536,9 +22388,7 @@ def test_get_management_server_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3550,10 +22400,10 @@ def test_get_management_server_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_management_server(request) + client.list_backup_plan_associations(request) -def test_get_management_server_rest_flattened(): +def test_list_backup_plan_associations_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3562,16 +22412,14 @@ def test_get_management_server_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backupdr.ManagementServer() + return_value = backupplanassociation.ListBackupPlanAssociationsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -3579,25 +22427,27 @@ def test_get_management_server_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupdr.ManagementServer.pb(return_value) + return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_management_server(**mock_args) + client.list_backup_plan_associations(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/managementServers/*}" + "%s/v1/{parent=projects/*/locations/*}/backupPlanAssociations" % client.transport._host, args[1], ) -def test_get_management_server_rest_flattened_error(transport: str = "rest"): +def test_list_backup_plan_associations_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3606,124 +22456,95 @@ def test_get_management_server_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_management_server( - backupdr.GetManagementServerRequest(), - name="name_value", + client.list_backup_plan_associations( + backupplanassociation.ListBackupPlanAssociationsRequest(), + parent="parent_value", ) -def test_get_management_server_rest_error(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - backupdr.CreateManagementServerRequest, - dict, - ], -) -def test_create_management_server_rest(request_type): +def test_list_backup_plan_associations_rest_pager(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["management_server"] = { - "name": "name_value", - "description": "description_value", - "labels": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "type_": 1, - "management_uri": {"web_ui": "web_ui_value", "api": "api_value"}, - "workforce_identity_based_management_uri": { - "first_party_management_uri": "first_party_management_uri_value", - 
"third_party_management_uri": "third_party_management_uri_value", - }, - "state": 1, - "networks": [{"network": "network_value", "peering_mode": 1}], - "etag": "etag_value", - "oauth2_client_id": "oauth2_client_id_value", - "workforce_identity_based_oauth2_client_id": { - "first_party_oauth2_client_id": "first_party_oauth2_client_id_value", - "third_party_oauth2_client_id": "third_party_oauth2_client_id_value", - }, - "ba_proxy_uri": ["ba_proxy_uri_value1", "ba_proxy_uri_value2"], - "satisfies_pzs": {"value": True}, - "satisfies_pzi": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = backupdr.CreateManagementServerRequest.meta.fields["management_server"] + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + ) + # Two responses for two calls + response = response + response - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] + # Wrap the values into proper Response objs + response = tuple( + backupplanassociation.ListBackupPlanAssociationsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + sample_request = {"parent": "projects/sample1/locations/sample2"} - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + pager = client.list_backup_plan_associations(request=sample_request) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, backupplanassociation.BackupPlanAssociation) for i in results + ) - subfields_not_in_runtime = [] + pages = list(client.list_backup_plan_associations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["management_server"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, 
dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.DeleteBackupPlanAssociationRequest, + dict, + ], +) +def test_delete_backup_plan_association_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["management_server"][field])): - del request_init["management_server"][field][i][subfield] - else: - del request_init["management_server"][field][subfield] + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -3738,13 +22559,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_management_server(request) + response = client.delete_backup_plan_association(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_create_management_server_rest_use_cached_wrapped_rpc(): +def test_delete_backup_plan_association_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3759,7 +22580,7 @@ def test_create_management_server_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_management_server + client._transport.delete_backup_plan_association in client._transport._wrapped_methods ) @@ -3769,11 +22590,11 @@ def test_create_management_server_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_management_server + client._transport.delete_backup_plan_association ] = mock_rpc request = {} - client.create_management_server(request) + client.delete_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -3782,21 +22603,20 @@ def test_create_management_server_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_management_server(request) + client.delete_backup_plan_association(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_management_server_rest_required_fields( - request_type=backupdr.CreateManagementServerRequest, +def test_delete_backup_plan_association_rest_required_fields( + request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} - request_init["parent"] = "" - request_init["management_server_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3804,39 +22624,26 @@ def test_create_management_server_rest_required_fields( ) # verify fields with default values are dropped - assert "managementServerId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_management_server._get_unset_required_fields(jsonified_request) + ).delete_backup_plan_association._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "managementServerId" in jsonified_request - assert ( - jsonified_request["managementServerId"] == request_init["management_server_id"] - ) - jsonified_request["parent"] = "parent_value" - jsonified_request["managementServerId"] = "management_server_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_management_server._get_unset_required_fields(jsonified_request) + 
).delete_backup_plan_association._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "management_server_id", - "request_id", - ) - ) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "managementServerId" in jsonified_request - assert jsonified_request["managementServerId"] == "management_server_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3857,10 +22664,9 @@ def test_create_management_server_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -3870,44 +22676,26 @@ def test_create_management_server_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_management_server(request) + response = client.delete_backup_plan_association(request) - expected_params = [ - ( - "managementServerId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_management_server_rest_unset_required_fields(): +def test_delete_backup_plan_association_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_management_server._get_unset_required_fields({}) - assert 
set(unset_fields) == ( - set( - ( - "managementServerId", - "requestId", - ) - ) - & set( - ( - "parent", - "managementServerId", - "managementServer", - ) - ) + unset_fields = transport.delete_backup_plan_association._get_unset_required_fields( + {} ) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_management_server_rest_interceptors(null_interceptor): +def test_delete_backup_plan_association_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -3920,14 +22708,14 @@ def test_create_management_server_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_create_management_server" + transports.BackupDRRestInterceptor, "post_delete_backup_plan_association" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_create_management_server" + transports.BackupDRRestInterceptor, "pre_delete_backup_plan_association" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = backupdr.CreateManagementServerRequest.pb( - backupdr.CreateManagementServerRequest() + pb_message = backupplanassociation.DeleteBackupPlanAssociationRequest.pb( + backupplanassociation.DeleteBackupPlanAssociationRequest() ) transcode.return_value = { "method": "post", @@ -3943,7 +22731,7 @@ def test_create_management_server_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = backupdr.CreateManagementServerRequest() + request = backupplanassociation.DeleteBackupPlanAssociationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -3951,7 +22739,7 @@ def test_create_management_server_rest_interceptors(null_interceptor): pre.return_value = request, 
metadata post.return_value = operations_pb2.Operation() - client.create_management_server( + client.delete_backup_plan_association( request, metadata=[ ("key", "val"), @@ -3963,8 +22751,9 @@ def test_create_management_server_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_management_server_rest_bad_request( - transport: str = "rest", request_type=backupdr.CreateManagementServerRequest +def test_delete_backup_plan_association_rest_bad_request( + transport: str = "rest", + request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3972,7 +22761,9 @@ def test_create_management_server_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3984,10 +22775,10 @@ def test_create_management_server_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_management_server(request) + client.delete_backup_plan_association(request) -def test_create_management_server_rest_flattened(): +def test_delete_backup_plan_association_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3999,13 +22790,13 @@ def test_create_management_server_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - management_server=backupdr.ManagementServer(name="name_value"), - management_server_id="management_server_id_value", + name="name_value", ) mock_args.update(sample_request) @@ -4016,20 +22807,20 @@ def test_create_management_server_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_management_server(**mock_args) + client.delete_backup_plan_association(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/managementServers" + "%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}" % client.transport._host, args[1], ) -def test_create_management_server_rest_flattened_error(transport: str = "rest"): +def test_delete_backup_plan_association_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4038,15 +22829,13 @@ def test_create_management_server_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_management_server( - backupdr.CreateManagementServerRequest(), - parent="parent_value", - management_server=backupdr.ManagementServer(name="name_value"), - management_server_id="management_server_id_value", + client.delete_backup_plan_association( + backupplanassociation.DeleteBackupPlanAssociationRequest(), + name="name_value", ) -def test_create_management_server_rest_error(): +def test_delete_backup_plan_association_rest_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4055,11 +22844,11 @@ def test_create_management_server_rest_error(): @pytest.mark.parametrize( "request_type", [ - backupdr.DeleteManagementServerRequest, + backupplanassociation.TriggerBackupRequest, dict, ], ) -def test_delete_management_server_rest(request_type): +def test_trigger_backup_rest(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4067,7 +22856,7 @@ def test_delete_management_server_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" + "name": 
"projects/sample1/locations/sample2/backupPlanAssociations/sample3" } request = request_type(**request_init) @@ -4083,13 +22872,13 @@ def test_delete_management_server_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_management_server(request) + response = client.trigger_backup(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_management_server_rest_use_cached_wrapped_rpc(): +def test_trigger_backup_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4103,22 +22892,17 @@ def test_delete_management_server_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_management_server - in client._transport._wrapped_methods - ) + assert client._transport.trigger_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_management_server - ] = mock_rpc + client._transport._wrapped_methods[client._transport.trigger_backup] = mock_rpc request = {} - client.delete_management_server(request) + client.trigger_backup(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -4127,20 +22911,21 @@ def test_delete_management_server_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_management_server(request) + client.trigger_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_management_server_rest_required_fields( - request_type=backupdr.DeleteManagementServerRequest, +def test_trigger_backup_rest_required_fields( + request_type=backupplanassociation.TriggerBackupRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} request_init["name"] = "" + request_init["rule_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4151,23 +22936,24 @@ def test_delete_management_server_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_management_server._get_unset_required_fields(jsonified_request) + ).trigger_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = "name_value" + jsonified_request["ruleId"] = "rule_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_management_server._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + ).trigger_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" + assert "ruleId" in jsonified_request + assert jsonified_request["ruleId"] == "rule_id_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4188,9 +22974,10 @@ def test_delete_management_server_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -4200,24 +22987,32 @@ def test_delete_management_server_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_management_server(request) + response = client.trigger_backup(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_management_server_rest_unset_required_fields(): +def test_trigger_backup_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_management_server._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.trigger_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "ruleId", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_management_server_rest_interceptors(null_interceptor): +def test_trigger_backup_rest_interceptors(null_interceptor): transport = 
transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -4230,14 +23025,14 @@ def test_delete_management_server_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_delete_management_server" + transports.BackupDRRestInterceptor, "post_trigger_backup" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_delete_management_server" + transports.BackupDRRestInterceptor, "pre_trigger_backup" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = backupdr.DeleteManagementServerRequest.pb( - backupdr.DeleteManagementServerRequest() + pb_message = backupplanassociation.TriggerBackupRequest.pb( + backupplanassociation.TriggerBackupRequest() ) transcode.return_value = { "method": "post", @@ -4253,7 +23048,7 @@ def test_delete_management_server_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = backupdr.DeleteManagementServerRequest() + request = backupplanassociation.TriggerBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -4261,7 +23056,7 @@ def test_delete_management_server_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_management_server( + client.trigger_backup( request, metadata=[ ("key", "val"), @@ -4273,8 +23068,8 @@ def test_delete_management_server_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_management_server_rest_bad_request( - transport: str = "rest", request_type=backupdr.DeleteManagementServerRequest +def test_trigger_backup_rest_bad_request( + transport: str = "rest", request_type=backupplanassociation.TriggerBackupRequest ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ 
-4283,7 +23078,7 @@ def test_delete_management_server_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" } request = request_type(**request_init) @@ -4296,10 +23091,10 @@ def test_delete_management_server_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_management_server(request) + client.trigger_backup(request) -def test_delete_management_server_rest_flattened(): +def test_trigger_backup_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4312,12 +23107,13 @@ def test_delete_management_server_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" } # get truthy value for each flattened field mock_args = dict( name="name_value", + rule_id="rule_id_value", ) mock_args.update(sample_request) @@ -4328,20 +23124,20 @@ def test_delete_management_server_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_management_server(**mock_args) + client.trigger_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/managementServers/*}" + "%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}:triggerBackup" % client.transport._host, args[1], ) -def test_delete_management_server_rest_flattened_error(transport: str = "rest"): +def test_trigger_backup_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4350,13 +23146,14 @@ def test_delete_management_server_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_management_server( - backupdr.DeleteManagementServerRequest(), + client.trigger_backup( + backupplanassociation.TriggerBackupRequest(), name="name_value", + rule_id="rule_id_value", ) -def test_delete_management_server_rest_error(): +def test_trigger_backup_rest_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4505,6 +23302,29 @@ def test_backup_dr_base_transport(): "get_management_server", "create_management_server", "delete_management_server", + "create_backup_vault", + "list_backup_vaults", + "fetch_usable_backup_vaults", + "get_backup_vault", + "update_backup_vault", + "delete_backup_vault", + "list_data_sources", + "get_data_source", + "update_data_source", + "list_backups", + "get_backup", + "update_backup", + "delete_backup", + "restore_backup", + "create_backup_plan", + "get_backup_plan", + "list_backup_plans", + "delete_backup_plan", + "create_backup_plan_association", + "get_backup_plan_association", + "list_backup_plan_associations", + "delete_backup_plan_association", + "trigger_backup", "set_iam_policy", "get_iam_policy", "test_iam_permissions", @@ -4803,6 +23623,75 @@ def test_backup_dr_client_transport_session_collision(transport_name): 
session1 = client1.transport.delete_management_server._session session2 = client2.transport.delete_management_server._session assert session1 != session2 + session1 = client1.transport.create_backup_vault._session + session2 = client2.transport.create_backup_vault._session + assert session1 != session2 + session1 = client1.transport.list_backup_vaults._session + session2 = client2.transport.list_backup_vaults._session + assert session1 != session2 + session1 = client1.transport.fetch_usable_backup_vaults._session + session2 = client2.transport.fetch_usable_backup_vaults._session + assert session1 != session2 + session1 = client1.transport.get_backup_vault._session + session2 = client2.transport.get_backup_vault._session + assert session1 != session2 + session1 = client1.transport.update_backup_vault._session + session2 = client2.transport.update_backup_vault._session + assert session1 != session2 + session1 = client1.transport.delete_backup_vault._session + session2 = client2.transport.delete_backup_vault._session + assert session1 != session2 + session1 = client1.transport.list_data_sources._session + session2 = client2.transport.list_data_sources._session + assert session1 != session2 + session1 = client1.transport.get_data_source._session + session2 = client2.transport.get_data_source._session + assert session1 != session2 + session1 = client1.transport.update_data_source._session + session2 = client2.transport.update_data_source._session + assert session1 != session2 + session1 = client1.transport.list_backups._session + session2 = client2.transport.list_backups._session + assert session1 != session2 + session1 = client1.transport.get_backup._session + session2 = client2.transport.get_backup._session + assert session1 != session2 + session1 = client1.transport.update_backup._session + session2 = client2.transport.update_backup._session + assert session1 != session2 + session1 = client1.transport.delete_backup._session + session2 = 
client2.transport.delete_backup._session + assert session1 != session2 + session1 = client1.transport.restore_backup._session + session2 = client2.transport.restore_backup._session + assert session1 != session2 + session1 = client1.transport.create_backup_plan._session + session2 = client2.transport.create_backup_plan._session + assert session1 != session2 + session1 = client1.transport.get_backup_plan._session + session2 = client2.transport.get_backup_plan._session + assert session1 != session2 + session1 = client1.transport.list_backup_plans._session + session2 = client2.transport.list_backup_plans._session + assert session1 != session2 + session1 = client1.transport.delete_backup_plan._session + session2 = client2.transport.delete_backup_plan._session + assert session1 != session2 + session1 = client1.transport.create_backup_plan_association._session + session2 = client2.transport.create_backup_plan_association._session + assert session1 != session2 + session1 = client1.transport.get_backup_plan_association._session + session2 = client2.transport.get_backup_plan_association._session + assert session1 != session2 + session1 = client1.transport.list_backup_plan_associations._session + session2 = client2.transport.list_backup_plan_associations._session + assert session1 != session2 + session1 = client1.transport.delete_backup_plan_association._session + session2 = client2.transport.delete_backup_plan_association._session + assert session1 != session2 + session1 = client1.transport.trigger_backup._session + session2 = client2.transport.trigger_backup._session + assert session1 != session2 def test_backup_dr_grpc_transport_channel(): @@ -4957,6 +23846,153 @@ def test_backup_dr_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client +def test_backup_path(): + project = "squid" + location = "clam" + backupvault = "whelk" + datasource = "octopus" + backup = "oyster" + expected = 
"projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}/backups/{backup}".format( + project=project, + location=location, + backupvault=backupvault, + datasource=datasource, + backup=backup, + ) + actual = BackupDRClient.backup_path( + project, location, backupvault, datasource, backup + ) + assert expected == actual + + +def test_parse_backup_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "backupvault": "mussel", + "datasource": "winkle", + "backup": "nautilus", + } + path = BackupDRClient.backup_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDRClient.parse_backup_path(path) + assert expected == actual + + +def test_backup_plan_path(): + project = "scallop" + location = "abalone" + backup_plan = "squid" + expected = ( + "projects/{project}/locations/{location}/backupPlans/{backup_plan}".format( + project=project, + location=location, + backup_plan=backup_plan, + ) + ) + actual = BackupDRClient.backup_plan_path(project, location, backup_plan) + assert expected == actual + + +def test_parse_backup_plan_path(): + expected = { + "project": "clam", + "location": "whelk", + "backup_plan": "octopus", + } + path = BackupDRClient.backup_plan_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BackupDRClient.parse_backup_plan_path(path) + assert expected == actual + + +def test_backup_plan_association_path(): + project = "oyster" + location = "nudibranch" + backup_plan_association = "cuttlefish" + expected = "projects/{project}/locations/{location}/backupPlanAssociations/{backup_plan_association}".format( + project=project, + location=location, + backup_plan_association=backup_plan_association, + ) + actual = BackupDRClient.backup_plan_association_path( + project, location, backup_plan_association + ) + assert expected == actual + + +def test_parse_backup_plan_association_path(): + expected = { + "project": "mussel", + "location": "winkle", + "backup_plan_association": "nautilus", + } + path = BackupDRClient.backup_plan_association_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDRClient.parse_backup_plan_association_path(path) + assert expected == actual + + +def test_backup_vault_path(): + project = "scallop" + location = "abalone" + backupvault = "squid" + expected = ( + "projects/{project}/locations/{location}/backupVaults/{backupvault}".format( + project=project, + location=location, + backupvault=backupvault, + ) + ) + actual = BackupDRClient.backup_vault_path(project, location, backupvault) + assert expected == actual + + +def test_parse_backup_vault_path(): + expected = { + "project": "clam", + "location": "whelk", + "backupvault": "octopus", + } + path = BackupDRClient.backup_vault_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BackupDRClient.parse_backup_vault_path(path) + assert expected == actual + + +def test_data_source_path(): + project = "oyster" + location = "nudibranch" + backupvault = "cuttlefish" + datasource = "mussel" + expected = "projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}".format( + project=project, + location=location, + backupvault=backupvault, + datasource=datasource, + ) + actual = BackupDRClient.data_source_path(project, location, backupvault, datasource) + assert expected == actual + + +def test_parse_data_source_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "backupvault": "scallop", + "datasource": "abalone", + } + path = BackupDRClient.data_source_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDRClient.parse_data_source_path(path) + assert expected == actual + + def test_management_server_path(): project = "squid" location = "clam" From e4ac435aaa9508e33090091232ff35df860bfd37 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 10:48:29 -0400 Subject: [PATCH 55/59] feat: [google-cloud-contact-center-insights] Add import / export IssueModel (#13132) - [ ] Regenerate this pull request now. 
BEGIN_COMMIT_OVERRIDE feat: Add CMEK InitializeLroSpec feat: Add metadata import to IngestConversations feat: Add sampling to IngestConversations docs: Add a comment for valid `order_by` values in ListConversations docs: Add a comment for valid `update_mask` values in UpdateConversation feat: Add import / export IssueModel END_COMMIT_OVERRIDE PiperOrigin-RevId: 683188578 Source-Link: https://github.com/googleapis/googleapis/commit/d0eeab38c8f11c090f05c332f2374b556ae36644 Source-Link: https://github.com/googleapis/googleapis-gen/commit/4d5b300a7249ce24278fbe77c16983a06d6e4a5d Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNvbnRhY3QtY2VudGVyLWluc2lnaHRzLy5Pd2xCb3QueWFtbCIsImgiOiI0ZDViMzAwYTcyNDljZTI0Mjc4ZmJlNzdjMTY5ODNhMDZkNmU0YTVkIn0= --------- Co-authored-by: Owl Bot --- .../cloud/contact_center_insights/__init__.py | 24 + .../contact_center_insights_v1/__init__.py | 24 + .../gapic_metadata.json | 60 + .../contact_center_insights/async_client.py | 548 +- .../contact_center_insights/client.py | 552 +- .../transports/base.py | 56 + .../transports/grpc.py | 129 +- .../transports/grpc_asyncio.py | 152 +- .../transports/rest.py | 570 +- .../types/__init__.py | 24 + .../types/contact_center_insights.py | 342 +- .../types/resources.py | 175 +- ...enter_insights_export_issue_model_async.py | 60 + ...center_insights_export_issue_model_sync.py | 60 + ...nter_insights_get_encryption_spec_async.py | 52 + ...enter_insights_get_encryption_spec_sync.py | 52 + ...enter_insights_import_issue_model_async.py | 60 + ...center_insights_import_issue_model_sync.py | 60 + ...sights_initialize_encryption_spec_async.py | 59 + ...nsights_initialize_encryption_spec_sync.py | 59 + ...google.cloud.contactcenterinsights.v1.json | 966 +- ...xup_contact_center_insights_v1_keywords.py | 8 +- .../test_contact_center_insights.py | 14094 ++++++++++------ 23 files changed, 12380 insertions(+), 5806 deletions(-) create mode 100644 
packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_async.py create mode 100644 packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_sync.py create mode 100644 packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_async.py create mode 100644 packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_sync.py create mode 100644 packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_async.py create mode 100644 packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_sync.py create mode 100644 packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_async.py create mode 100644 packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_sync.py diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/__init__.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/__init__.py index f19179e38c46..13964e6e628e 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/__init__.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/__init__.py @@ -56,16 +56,26 @@ ExportInsightsDataMetadata, ExportInsightsDataRequest, ExportInsightsDataResponse, + ExportIssueModelMetadata, + 
ExportIssueModelRequest, + ExportIssueModelResponse, GetAnalysisRequest, GetConversationRequest, + GetEncryptionSpecRequest, GetIssueModelRequest, GetIssueRequest, GetPhraseMatcherRequest, GetSettingsRequest, GetViewRequest, + ImportIssueModelMetadata, + ImportIssueModelRequest, + ImportIssueModelResponse, IngestConversationsMetadata, IngestConversationsRequest, IngestConversationsResponse, + InitializeEncryptionSpecMetadata, + InitializeEncryptionSpecRequest, + InitializeEncryptionSpecResponse, ListAnalysesRequest, ListAnalysesResponse, ListConversationsRequest, @@ -101,11 +111,13 @@ Conversation, ConversationDataSource, ConversationLevelSentiment, + ConversationLevelSilence, ConversationParticipant, ConversationSummarizationSuggestionData, DialogflowIntent, DialogflowInteractionData, DialogflowSource, + EncryptionSpec, Entity, EntityMentionData, ExactMatchConfig, @@ -170,16 +182,26 @@ "ExportInsightsDataMetadata", "ExportInsightsDataRequest", "ExportInsightsDataResponse", + "ExportIssueModelMetadata", + "ExportIssueModelRequest", + "ExportIssueModelResponse", "GetAnalysisRequest", "GetConversationRequest", + "GetEncryptionSpecRequest", "GetIssueModelRequest", "GetIssueRequest", "GetPhraseMatcherRequest", "GetSettingsRequest", "GetViewRequest", + "ImportIssueModelMetadata", + "ImportIssueModelRequest", + "ImportIssueModelResponse", "IngestConversationsMetadata", "IngestConversationsRequest", "IngestConversationsResponse", + "InitializeEncryptionSpecMetadata", + "InitializeEncryptionSpecRequest", + "InitializeEncryptionSpecResponse", "ListAnalysesRequest", "ListAnalysesResponse", "ListConversationsRequest", @@ -214,11 +236,13 @@ "Conversation", "ConversationDataSource", "ConversationLevelSentiment", + "ConversationLevelSilence", "ConversationParticipant", "ConversationSummarizationSuggestionData", "DialogflowIntent", "DialogflowInteractionData", "DialogflowSource", + "EncryptionSpec", "Entity", "EntityMentionData", "ExactMatchConfig", diff --git 
a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/__init__.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/__init__.py index 5f169fc674d2..8548409a4056 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/__init__.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/__init__.py @@ -54,16 +54,26 @@ ExportInsightsDataMetadata, ExportInsightsDataRequest, ExportInsightsDataResponse, + ExportIssueModelMetadata, + ExportIssueModelRequest, + ExportIssueModelResponse, GetAnalysisRequest, GetConversationRequest, + GetEncryptionSpecRequest, GetIssueModelRequest, GetIssueRequest, GetPhraseMatcherRequest, GetSettingsRequest, GetViewRequest, + ImportIssueModelMetadata, + ImportIssueModelRequest, + ImportIssueModelResponse, IngestConversationsMetadata, IngestConversationsRequest, IngestConversationsResponse, + InitializeEncryptionSpecMetadata, + InitializeEncryptionSpecRequest, + InitializeEncryptionSpecResponse, ListAnalysesRequest, ListAnalysesResponse, ListConversationsRequest, @@ -99,11 +109,13 @@ Conversation, ConversationDataSource, ConversationLevelSentiment, + ConversationLevelSilence, ConversationParticipant, ConversationSummarizationSuggestionData, DialogflowIntent, DialogflowInteractionData, DialogflowSource, + EncryptionSpec, Entity, EntityMentionData, ExactMatchConfig, @@ -158,6 +170,7 @@ "Conversation", "ConversationDataSource", "ConversationLevelSentiment", + "ConversationLevelSilence", "ConversationParticipant", "ConversationSummarizationSuggestionData", "ConversationView", @@ -181,25 +194,36 @@ "DialogflowIntent", "DialogflowInteractionData", "DialogflowSource", + "EncryptionSpec", "Entity", "EntityMentionData", "ExactMatchConfig", "ExportInsightsDataMetadata", "ExportInsightsDataRequest", "ExportInsightsDataResponse", + "ExportIssueModelMetadata", + "ExportIssueModelRequest", + 
"ExportIssueModelResponse", "FaqAnswerData", "GcsSource", "GetAnalysisRequest", "GetConversationRequest", + "GetEncryptionSpecRequest", "GetIssueModelRequest", "GetIssueRequest", "GetPhraseMatcherRequest", "GetSettingsRequest", "GetViewRequest", "HoldData", + "ImportIssueModelMetadata", + "ImportIssueModelRequest", + "ImportIssueModelResponse", "IngestConversationsMetadata", "IngestConversationsRequest", "IngestConversationsResponse", + "InitializeEncryptionSpecMetadata", + "InitializeEncryptionSpecRequest", + "InitializeEncryptionSpecResponse", "Intent", "IntentMatchData", "InterruptionData", diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_metadata.json b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_metadata.json index af5c8ce82e8f..0e9a96c732d4 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_metadata.json +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_metadata.json @@ -95,6 +95,11 @@ "export_insights_data" ] }, + "ExportIssueModel": { + "methods": [ + "export_issue_model" + ] + }, "GetAnalysis": { "methods": [ "get_analysis" @@ -105,6 +110,11 @@ "get_conversation" ] }, + "GetEncryptionSpec": { + "methods": [ + "get_encryption_spec" + ] + }, "GetIssue": { "methods": [ "get_issue" @@ -130,11 +140,21 @@ "get_view" ] }, + "ImportIssueModel": { + "methods": [ + "import_issue_model" + ] + }, "IngestConversations": { "methods": [ "ingest_conversations" ] }, + "InitializeEncryptionSpec": { + "methods": [ + "initialize_encryption_spec" + ] + }, "ListAnalyses": { "methods": [ "list_analyses" @@ -295,6 +315,11 @@ "export_insights_data" ] }, + "ExportIssueModel": { + "methods": [ + "export_issue_model" + ] + }, "GetAnalysis": { "methods": [ "get_analysis" @@ -305,6 +330,11 @@ "get_conversation" ] }, + "GetEncryptionSpec": { + "methods": [ + "get_encryption_spec" + ] 
+ }, "GetIssue": { "methods": [ "get_issue" @@ -330,11 +360,21 @@ "get_view" ] }, + "ImportIssueModel": { + "methods": [ + "import_issue_model" + ] + }, "IngestConversations": { "methods": [ "ingest_conversations" ] }, + "InitializeEncryptionSpec": { + "methods": [ + "initialize_encryption_spec" + ] + }, "ListAnalyses": { "methods": [ "list_analyses" @@ -495,6 +535,11 @@ "export_insights_data" ] }, + "ExportIssueModel": { + "methods": [ + "export_issue_model" + ] + }, "GetAnalysis": { "methods": [ "get_analysis" @@ -505,6 +550,11 @@ "get_conversation" ] }, + "GetEncryptionSpec": { + "methods": [ + "get_encryption_spec" + ] + }, "GetIssue": { "methods": [ "get_issue" @@ -530,11 +580,21 @@ "get_view" ] }, + "ImportIssueModel": { + "methods": [ + "import_issue_model" + ] + }, "IngestConversations": { "methods": [ "ingest_conversations" ] }, + "InitializeEncryptionSpec": { + "methods": [ + "initialize_encryption_spec" + ] + }, "ListAnalyses": { "methods": [ "list_analyses" diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py index 3e0bb0884cb2..e792496b0905 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py @@ -89,6 +89,12 @@ class ContactCenterInsightsAsyncClient: parse_conversation_profile_path = staticmethod( ContactCenterInsightsClient.parse_conversation_profile_path ) + encryption_spec_path = staticmethod( + ContactCenterInsightsClient.encryption_spec_path + ) + parse_encryption_spec_path = staticmethod( + ContactCenterInsightsClient.parse_encryption_spec_path + ) issue_path = 
staticmethod(ContactCenterInsightsClient.issue_path) parse_issue_path = staticmethod(ContactCenterInsightsClient.parse_issue_path) issue_model_path = staticmethod(ContactCenterInsightsClient.issue_model_path) @@ -320,7 +326,9 @@ async def create_conversation( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> resources.Conversation: - r"""Creates a conversation. + r"""Creates a conversation. Note that this method does not support + audio transcription or redaction. Use ``conversations.upload`` + instead. .. code-block:: python @@ -448,9 +456,9 @@ async def upload_conversation( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Create a longrunning conversation upload operation. - This method differs from CreateConversation by allowing - audio transcription and optional DLP redaction. + r"""Create a long-running conversation upload operation. This method + differs from ``CreateConversation`` by allowing audio + transcription and optional DLP redaction. .. code-block:: python @@ -590,7 +598,21 @@ async def sample_update_conversation(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to be updated. + The list of fields to be updated. All possible fields + can be updated by passing ``*``, or a subset of the + following updateable fields can be provided: + + - ``agent_id`` + - ``language_code`` + - ``labels`` + - ``metadata`` + - ``quality_metadata`` + - ``call_metadata`` + - ``start_time`` + - ``expire_time`` or ``ttl`` + - ``data_source.gcs_source.audio_uri`` or + ``data_source.dialogflow_source.audio_uri`` + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
@@ -2801,6 +2823,256 @@ async def sample_undeploy_issue_model(): # Done; return the response. return response + async def export_issue_model( + self, + request: Optional[ + Union[contact_center_insights.ExportIssueModelRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Exports an issue model to the provided destination. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + async def sample_export_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + gcs_destination = contact_center_insights_v1.GcsDestination() + gcs_destination.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ExportIssueModelRequest( + gcs_destination=gcs_destination, + name="name_value", + ) + + # Make the request + operation = client.export_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.contact_center_insights_v1.types.ExportIssueModelRequest, dict]]): + The request object. Request to export an issue model. + name (:class:`str`): + Required. The issue model to export. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.contact_center_insights_v1.types.ExportIssueModelResponse` + Response from export issue model + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, contact_center_insights.ExportIssueModelRequest): + request = contact_center_insights.ExportIssueModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.export_issue_model + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + contact_center_insights.ExportIssueModelResponse, + metadata_type=contact_center_insights.ExportIssueModelMetadata, + ) + + # Done; return the response. + return response + + async def import_issue_model( + self, + request: Optional[ + Union[contact_center_insights.ImportIssueModelRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports an issue model from a Cloud Storage bucket. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + async def sample_import_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + gcs_source = contact_center_insights_v1.GcsSource() + gcs_source.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ImportIssueModelRequest( + gcs_source=gcs_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.contact_center_insights_v1.types.ImportIssueModelRequest, dict]]): + The request object. Request to import an issue model. + parent (:class:`str`): + Required. 
The parent resource of the + issue model. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.contact_center_insights_v1.types.ImportIssueModelResponse` + Response from import issue model + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, contact_center_insights.ImportIssueModelRequest): + request = contact_center_insights.ImportIssueModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.import_issue_model + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + contact_center_insights.ImportIssueModelResponse, + metadata_type=contact_center_insights.ImportIssueModelMetadata, + ) + + # Done; return the response. + return response + async def get_issue( self, request: Optional[Union[contact_center_insights.GetIssueRequest, dict]] = None, @@ -4061,7 +4333,13 @@ async def sample_get_settings(): Returns: google.cloud.contact_center_insights_v1.types.Settings: - The settings resource. + The CCAI Insights project wide settings. + Use these settings to configure the behavior of + Insights. View these settings with + [getsettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/getSettings) + and change the settings with + [updateSettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/updateSettings). + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -4172,7 +4450,13 @@ async def sample_update_settings(): Returns: google.cloud.contact_center_insights_v1.types.Settings: - The settings resource. + The CCAI Insights project wide settings. + Use these settings to configure the behavior of + Insights. View these settings with + [getsettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/getSettings) + and change the settings with + [updateSettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/updateSettings). + """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have @@ -4224,6 +4508,256 @@ async def sample_update_settings(): # Done; return the response. return response + async def get_encryption_spec( + self, + request: Optional[ + Union[contact_center_insights.GetEncryptionSpecRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.EncryptionSpec: + r"""Gets location-level encryption key specification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + async def sample_get_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + request = contact_center_insights_v1.GetEncryptionSpecRequest( + name="name_value", + ) + + # Make the request + response = await client.get_encryption_spec(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.contact_center_insights_v1.types.GetEncryptionSpecRequest, dict]]): + The request object. The request to get location-level + encryption specification. + name (:class:`str`): + Required. The name of the encryption + spec resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.contact_center_insights_v1.types.EncryptionSpec: + A customer-managed encryption key + specification that can be applied to all + created resources (e.g. Conversation). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, contact_center_insights.GetEncryptionSpecRequest): + request = contact_center_insights.GetEncryptionSpecRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_encryption_spec + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def initialize_encryption_spec( + self, + request: Optional[ + Union[contact_center_insights.InitializeEncryptionSpecRequest, dict] + ] = None, + *, + encryption_spec: Optional[resources.EncryptionSpec] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Initializes a location-level encryption key + specification. An error will be thrown if the location + has resources already created before the initialization. + Once the encryption specification is initialized at a + location, it is immutable and all newly created + resources under the location will be encrypted with the + existing specification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + async def sample_initialize_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + encryption_spec = contact_center_insights_v1.EncryptionSpec() + encryption_spec.kms_key = "kms_key_value" + + request = contact_center_insights_v1.InitializeEncryptionSpecRequest( + encryption_spec=encryption_spec, + ) + + # Make the request + operation = client.initialize_encryption_spec(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecRequest, dict]]): + The request object. The request to initialize a + location-level encryption specification. + encryption_spec (:class:`google.cloud.contact_center_insights_v1.types.EncryptionSpec`): + Required. The encryption spec used for CMEK encryption. + It is required that the kms key is in the same region as + the endpoint. The same key will be used for all + provisioned resources, if encryption is available. If + the kms_key_name is left empty, no encryption will be + enforced. + + This corresponds to the ``encryption_spec`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecResponse` + The response to initialize a location-level encryption + specification. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([encryption_spec]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, contact_center_insights.InitializeEncryptionSpecRequest + ): + request = contact_center_insights.InitializeEncryptionSpecRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if encryption_spec is not None: + request.encryption_spec = encryption_spec + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.initialize_encryption_spec + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("encryption_spec.name", request.encryption_spec.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + contact_center_insights.InitializeEncryptionSpecResponse, + metadata_type=contact_center_insights.InitializeEncryptionSpecMetadata, + ) + + # Done; return the response. + return response + async def create_view( self, request: Optional[ diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py index 5a264ad26d88..a81a8b69ce6f 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py @@ -266,6 +266,26 @@ def parse_conversation_profile_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def encryption_spec_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified encryption_spec string.""" + return "projects/{project}/locations/{location}/encryptionSpec".format( + project=project, + location=location, + ) + + @staticmethod + def parse_encryption_spec_path(path: str) -> Dict[str, str]: + """Parses a encryption_spec path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/encryptionSpec$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def issue_path( project: str, @@ -915,7 +935,9 @@ def create_conversation( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> resources.Conversation: - r"""Creates a conversation. + r"""Creates a conversation. Note that this method does not support + audio transcription or redaction. Use ``conversations.upload`` + instead. .. 
code-block:: python @@ -1040,9 +1062,9 @@ def upload_conversation( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Create a longrunning conversation upload operation. - This method differs from CreateConversation by allowing - audio transcription and optional DLP redaction. + r"""Create a long-running conversation upload operation. This method + differs from ``CreateConversation`` by allowing audio + transcription and optional DLP redaction. .. code-block:: python @@ -1180,7 +1202,21 @@ def sample_update_conversation(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. + The list of fields to be updated. All possible fields + can be updated by passing ``*``, or a subset of the + following updateable fields can be provided: + + - ``agent_id`` + - ``language_code`` + - ``labels`` + - ``metadata`` + - ``quality_metadata`` + - ``call_metadata`` + - ``start_time`` + - ``expire_time`` or ``ttl`` + - ``data_source.gcs_source.audio_uri`` or + ``data_source.dialogflow_source.audio_uri`` + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -3338,6 +3374,250 @@ def sample_undeploy_issue_model(): # Done; return the response. return response + def export_issue_model( + self, + request: Optional[ + Union[contact_center_insights.ExportIssueModelRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Exports an issue model to the provided destination. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + def sample_export_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + gcs_destination = contact_center_insights_v1.GcsDestination() + gcs_destination.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ExportIssueModelRequest( + gcs_destination=gcs_destination, + name="name_value", + ) + + # Make the request + operation = client.export_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.contact_center_insights_v1.types.ExportIssueModelRequest, dict]): + The request object. Request to export an issue model. + name (str): + Required. The issue model to export. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.contact_center_insights_v1.types.ExportIssueModelResponse` + Response from export issue model + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, contact_center_insights.ExportIssueModelRequest): + request = contact_center_insights.ExportIssueModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_issue_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + contact_center_insights.ExportIssueModelResponse, + metadata_type=contact_center_insights.ExportIssueModelMetadata, + ) + + # Done; return the response. + return response + + def import_issue_model( + self, + request: Optional[ + Union[contact_center_insights.ImportIssueModelRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Imports an issue model from a Cloud Storage bucket. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + def sample_import_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + gcs_source = contact_center_insights_v1.GcsSource() + gcs_source.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ImportIssueModelRequest( + gcs_source=gcs_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.contact_center_insights_v1.types.ImportIssueModelRequest, dict]): + The request object. Request to import an issue model. + parent (str): + Required. The parent resource of the + issue model. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.contact_center_insights_v1.types.ImportIssueModelResponse` + Response from import issue model + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, contact_center_insights.ImportIssueModelRequest): + request = contact_center_insights.ImportIssueModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_issue_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + contact_center_insights.ImportIssueModelResponse, + metadata_type=contact_center_insights.ImportIssueModelMetadata, + ) + + # Done; return the response. + return response + def get_issue( self, request: Optional[Union[contact_center_insights.GetIssueRequest, dict]] = None, @@ -4567,7 +4847,13 @@ def sample_get_settings(): Returns: google.cloud.contact_center_insights_v1.types.Settings: - The settings resource. + The CCAI Insights project wide settings. + Use these settings to configure the behavior of + Insights. 
View these settings with + [getsettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/getSettings) + and change the settings with + [updateSettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/updateSettings). + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -4675,7 +4961,13 @@ def sample_update_settings(): Returns: google.cloud.contact_center_insights_v1.types.Settings: - The settings resource. + The CCAI Insights project wide settings. + Use these settings to configure the behavior of + Insights. View these settings with + [getsettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/getSettings) + and change the settings with + [updateSettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/updateSettings). + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -4724,6 +5016,252 @@ def sample_update_settings(): # Done; return the response. return response + def get_encryption_spec( + self, + request: Optional[ + Union[contact_center_insights.GetEncryptionSpecRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.EncryptionSpec: + r"""Gets location-level encryption key specification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + def sample_get_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + request = contact_center_insights_v1.GetEncryptionSpecRequest( + name="name_value", + ) + + # Make the request + response = client.get_encryption_spec(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.contact_center_insights_v1.types.GetEncryptionSpecRequest, dict]): + The request object. The request to get location-level + encryption specification. + name (str): + Required. The name of the encryption + spec resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.contact_center_insights_v1.types.EncryptionSpec: + A customer-managed encryption key + specification that can be applied to all + created resources (e.g. Conversation). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, contact_center_insights.GetEncryptionSpecRequest): + request = contact_center_insights.GetEncryptionSpecRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_encryption_spec] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def initialize_encryption_spec( + self, + request: Optional[ + Union[contact_center_insights.InitializeEncryptionSpecRequest, dict] + ] = None, + *, + encryption_spec: Optional[resources.EncryptionSpec] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Initializes a location-level encryption key + specification. An error will be thrown if the location + has resources already created before the initialization. + Once the encryption specification is initialized at a + location, it is immutable and all newly created + resources under the location will be encrypted with the + existing specification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + def sample_initialize_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + encryption_spec = contact_center_insights_v1.EncryptionSpec() + encryption_spec.kms_key = "kms_key_value" + + request = contact_center_insights_v1.InitializeEncryptionSpecRequest( + encryption_spec=encryption_spec, + ) + + # Make the request + operation = client.initialize_encryption_spec(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecRequest, dict]): + The request object. The request to initialize a + location-level encryption specification. + encryption_spec (google.cloud.contact_center_insights_v1.types.EncryptionSpec): + Required. The encryption spec used for CMEK encryption. + It is required that the kms key is in the same region as + the endpoint. The same key will be used for all + provisioned resources, if encryption is available. If + the kms_key_name is left empty, no encryption will be + enforced. + + This corresponds to the ``encryption_spec`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecResponse` + The response to initialize a location-level encryption + specification. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([encryption_spec]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, contact_center_insights.InitializeEncryptionSpecRequest + ): + request = contact_center_insights.InitializeEncryptionSpecRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if encryption_spec is not None: + request.encryption_spec = encryption_spec + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.initialize_encryption_spec + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("encryption_spec.name", request.encryption_spec.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + contact_center_insights.InitializeEncryptionSpecResponse, + metadata_type=contact_center_insights.InitializeEncryptionSpecMetadata, + ) + + # Done; return the response. + return response + def create_view( self, request: Optional[ diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/base.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/base.py index 36ab3e540e1a..1ccd673466bf 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/base.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/base.py @@ -238,6 +238,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.export_issue_model: gapic_v1.method.wrap_method( + self.export_issue_model, + default_timeout=None, + client_info=client_info, + ), + self.import_issue_model: gapic_v1.method.wrap_method( + self.import_issue_model, + default_timeout=None, + client_info=client_info, + ), self.get_issue: gapic_v1.method.wrap_method( self.get_issue, default_timeout=None, @@ -303,6 +313,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_encryption_spec: gapic_v1.method.wrap_method( + self.get_encryption_spec, + default_timeout=None, + client_info=client_info, + ), + self.initialize_encryption_spec: gapic_v1.method.wrap_method( + self.initialize_encryption_spec, + default_timeout=None, + client_info=client_info, + ), self.create_view: gapic_v1.method.wrap_method( self.create_view, default_timeout=None, @@ -542,6 +562,24 @@ def undeploy_issue_model( ]: raise NotImplementedError() + 
@property + def export_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ExportIssueModelRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def import_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ImportIssueModelRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def get_issue( self, @@ -671,6 +709,24 @@ def update_settings( ]: raise NotImplementedError() + @property + def get_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.GetEncryptionSpecRequest], + Union[resources.EncryptionSpec, Awaitable[resources.EncryptionSpec]], + ]: + raise NotImplementedError() + + @property + def initialize_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.InitializeEncryptionSpecRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def create_view( self, diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc.py index 9028aa2b9559..adb2ad6f700c 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc.py @@ -263,7 +263,9 @@ def create_conversation( ]: r"""Return a callable for the create conversation method over gRPC. - Creates a conversation. + Creates a conversation. Note that this method does not support + audio transcription or redaction. Use ``conversations.upload`` + instead. 
Returns: Callable[[~.CreateConversationRequest], @@ -291,9 +293,9 @@ def upload_conversation( ]: r"""Return a callable for the upload conversation method over gRPC. - Create a longrunning conversation upload operation. - This method differs from CreateConversation by allowing - audio transcription and optional DLP redaction. + Create a long-running conversation upload operation. This method + differs from ``CreateConversation`` by allowing audio + transcription and optional DLP redaction. Returns: Callable[[~.UploadConversationRequest], @@ -849,6 +851,62 @@ def undeploy_issue_model( ) return self._stubs["undeploy_issue_model"] + @property + def export_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ExportIssueModelRequest], operations_pb2.Operation + ]: + r"""Return a callable for the export issue model method over gRPC. + + Exports an issue model to the provided destination. + + Returns: + Callable[[~.ExportIssueModelRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_issue_model" not in self._stubs: + self._stubs["export_issue_model"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/ExportIssueModel", + request_serializer=contact_center_insights.ExportIssueModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_issue_model"] + + @property + def import_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ImportIssueModelRequest], operations_pb2.Operation + ]: + r"""Return a callable for the import issue model method over gRPC. + + Imports an issue model from a Cloud Storage bucket. 
+ + Returns: + Callable[[~.ImportIssueModelRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_issue_model" not in self._stubs: + self._stubs["import_issue_model"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/ImportIssueModel", + request_serializer=contact_center_insights.ImportIssueModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_issue_model"] + @property def get_issue( self, @@ -1207,6 +1265,69 @@ def update_settings( ) return self._stubs["update_settings"] + @property + def get_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.GetEncryptionSpecRequest], resources.EncryptionSpec + ]: + r"""Return a callable for the get encryption spec method over gRPC. + + Gets location-level encryption key specification. + + Returns: + Callable[[~.GetEncryptionSpecRequest], + ~.EncryptionSpec]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_encryption_spec" not in self._stubs: + self._stubs["get_encryption_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/GetEncryptionSpec", + request_serializer=contact_center_insights.GetEncryptionSpecRequest.serialize, + response_deserializer=resources.EncryptionSpec.deserialize, + ) + return self._stubs["get_encryption_spec"] + + @property + def initialize_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.InitializeEncryptionSpecRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the initialize encryption spec method over gRPC. + + Initializes a location-level encryption key + specification. An error will be thrown if the location + has resources already created before the initialization. + Once the encryption specification is initialized at a + location, it is immutable and all newly created + resources under the location will be encrypted with the + existing specification. + + Returns: + Callable[[~.InitializeEncryptionSpecRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "initialize_encryption_spec" not in self._stubs: + self._stubs["initialize_encryption_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/InitializeEncryptionSpec", + request_serializer=contact_center_insights.InitializeEncryptionSpecRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["initialize_encryption_spec"] + @property def create_view( self, diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc_asyncio.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc_asyncio.py index f04fd8885c78..abfe06f88262 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc_asyncio.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc_asyncio.py @@ -270,7 +270,9 @@ def create_conversation( ]: r"""Return a callable for the create conversation method over gRPC. - Creates a conversation. + Creates a conversation. Note that this method does not support + audio transcription or redaction. Use ``conversations.upload`` + instead. Returns: Callable[[~.CreateConversationRequest], @@ -299,9 +301,9 @@ def upload_conversation( ]: r"""Return a callable for the upload conversation method over gRPC. - Create a longrunning conversation upload operation. - This method differs from CreateConversation by allowing - audio transcription and optional DLP redaction. + Create a long-running conversation upload operation. This method + differs from ``CreateConversation`` by allowing audio + transcription and optional DLP redaction. 
Returns: Callable[[~.UploadConversationRequest], @@ -875,6 +877,64 @@ def undeploy_issue_model( ) return self._stubs["undeploy_issue_model"] + @property + def export_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ExportIssueModelRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the export issue model method over gRPC. + + Exports an issue model to the provided destination. + + Returns: + Callable[[~.ExportIssueModelRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_issue_model" not in self._stubs: + self._stubs["export_issue_model"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/ExportIssueModel", + request_serializer=contact_center_insights.ExportIssueModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_issue_model"] + + @property + def import_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ImportIssueModelRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the import issue model method over gRPC. + + Imports an issue model from a Cloud Storage bucket. + + Returns: + Callable[[~.ImportIssueModelRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "import_issue_model" not in self._stubs: + self._stubs["import_issue_model"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/ImportIssueModel", + request_serializer=contact_center_insights.ImportIssueModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_issue_model"] + @property def get_issue( self, @@ -1246,6 +1306,70 @@ def update_settings( ) return self._stubs["update_settings"] + @property + def get_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.GetEncryptionSpecRequest], + Awaitable[resources.EncryptionSpec], + ]: + r"""Return a callable for the get encryption spec method over gRPC. + + Gets location-level encryption key specification. + + Returns: + Callable[[~.GetEncryptionSpecRequest], + Awaitable[~.EncryptionSpec]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_encryption_spec" not in self._stubs: + self._stubs["get_encryption_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/GetEncryptionSpec", + request_serializer=contact_center_insights.GetEncryptionSpecRequest.serialize, + response_deserializer=resources.EncryptionSpec.deserialize, + ) + return self._stubs["get_encryption_spec"] + + @property + def initialize_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.InitializeEncryptionSpecRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the initialize encryption spec method over gRPC. + + Initializes a location-level encryption key + specification. An error will be thrown if the location + has resources already created before the initialization. 
+ Once the encryption specification is initialized at a + location, it is immutable and all newly created + resources under the location will be encrypted with the + existing specification. + + Returns: + Callable[[~.InitializeEncryptionSpecRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "initialize_encryption_spec" not in self._stubs: + self._stubs["initialize_encryption_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/InitializeEncryptionSpec", + request_serializer=contact_center_insights.InitializeEncryptionSpecRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["initialize_encryption_spec"] + @property def create_view( self, @@ -1493,6 +1617,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.export_issue_model: gapic_v1.method_async.wrap_method( + self.export_issue_model, + default_timeout=None, + client_info=client_info, + ), + self.import_issue_model: gapic_v1.method_async.wrap_method( + self.import_issue_model, + default_timeout=None, + client_info=client_info, + ), self.get_issue: gapic_v1.method_async.wrap_method( self.get_issue, default_timeout=None, @@ -1558,6 +1692,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_encryption_spec: gapic_v1.method_async.wrap_method( + self.get_encryption_spec, + default_timeout=None, + client_info=client_info, + ), + self.initialize_encryption_spec: gapic_v1.method_async.wrap_method( + self.initialize_encryption_spec, + default_timeout=None, + client_info=client_info, + ), self.create_view: gapic_v1.method_async.wrap_method( 
self.create_view, default_timeout=None, diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py index ab1260ec3b6c..4c44d1bd3e8a 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py @@ -191,6 +191,14 @@ def post_export_insights_data(self, response): logging.log(f"Received response: {response}") return response + def pre_export_issue_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_issue_model(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_analysis(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -207,6 +215,14 @@ def post_get_conversation(self, response): logging.log(f"Received response: {response}") return response + def pre_get_encryption_spec(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_encryption_spec(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_issue(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -247,6 +263,14 @@ def post_get_view(self, response): logging.log(f"Received response: {response}") return response + def pre_import_issue_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_import_issue_model(self, response): + logging.log(f"Received response: {response}") + return response + 
def pre_ingest_conversations(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -255,6 +279,14 @@ def post_ingest_conversations(self, response): logging.log(f"Received response: {response}") return response + def pre_initialize_encryption_spec(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_initialize_encryption_spec(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_analyses(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -738,6 +770,31 @@ def post_export_insights_data( """ return response + def pre_export_issue_model( + self, + request: contact_center_insights.ExportIssueModelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + contact_center_insights.ExportIssueModelRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for export_issue_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContactCenterInsights server. + """ + return request, metadata + + def post_export_issue_model( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for export_issue_model + + Override in a subclass to manipulate the response + after it is returned by the ContactCenterInsights server but before + it is returned to user code. 
+ """ + return response + def pre_get_analysis( self, request: contact_center_insights.GetAnalysisRequest, @@ -784,6 +841,31 @@ def post_get_conversation( """ return response + def pre_get_encryption_spec( + self, + request: contact_center_insights.GetEncryptionSpecRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + contact_center_insights.GetEncryptionSpecRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_encryption_spec + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContactCenterInsights server. + """ + return request, metadata + + def post_get_encryption_spec( + self, response: resources.EncryptionSpec + ) -> resources.EncryptionSpec: + """Post-rpc interceptor for get_encryption_spec + + Override in a subclass to manipulate the response + after it is returned by the ContactCenterInsights server but before + it is returned to user code. + """ + return response + def pre_get_issue( self, request: contact_center_insights.GetIssueRequest, @@ -895,6 +977,31 @@ def post_get_view(self, response: resources.View) -> resources.View: """ return response + def pre_import_issue_model( + self, + request: contact_center_insights.ImportIssueModelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + contact_center_insights.ImportIssueModelRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for import_issue_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContactCenterInsights server. + """ + return request, metadata + + def post_import_issue_model( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for import_issue_model + + Override in a subclass to manipulate the response + after it is returned by the ContactCenterInsights server but before + it is returned to user code. 
+ """ + return response + def pre_ingest_conversations( self, request: contact_center_insights.IngestConversationsRequest, @@ -920,6 +1027,32 @@ def post_ingest_conversations( """ return response + def pre_initialize_encryption_spec( + self, + request: contact_center_insights.InitializeEncryptionSpecRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + contact_center_insights.InitializeEncryptionSpecRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for initialize_encryption_spec + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContactCenterInsights server. + """ + return request, metadata + + def post_initialize_encryption_spec( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for initialize_encryption_spec + + Override in a subclass to manipulate the response + after it is returned by the ContactCenterInsights server but before + it is returned to user code. + """ + return response + def pre_list_analyses( self, request: contact_center_insights.ListAnalysesRequest, @@ -2960,6 +3093,101 @@ def __call__( resp = self._interceptor.post_export_insights_data(resp) return resp + class _ExportIssueModel(ContactCenterInsightsRestStub): + def __hash__(self): + return hash("ExportIssueModel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: contact_center_insights.ExportIssueModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the export issue model method over HTTP. + + Args: + request (~.contact_center_insights.ExportIssueModelRequest): + The request object. Request to export an issue model. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/issueModels/*}:export", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_export_issue_model( + request, metadata + ) + pb_request = contact_center_insights.ExportIssueModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_issue_model(resp) + return resp + class _GetAnalysis(ContactCenterInsightsRestStub): def __hash__(self): return hash("GetAnalysis") @@ -3132,6 +3360,97 @@ def __call__( resp = self._interceptor.post_get_conversation(resp) return resp + class _GetEncryptionSpec(ContactCenterInsightsRestStub): + def __hash__(self): + return hash("GetEncryptionSpec") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: contact_center_insights.GetEncryptionSpecRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.EncryptionSpec: + r"""Call the get encryption spec method over HTTP. + + Args: + request (~.contact_center_insights.GetEncryptionSpecRequest): + The request object. The request to get location-level + encryption specification. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.EncryptionSpec: + A customer-managed encryption key + specification that can be applied to all + created resources (e.g. Conversation). 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/encryptionSpec}", + }, + ] + request, metadata = self._interceptor.pre_get_encryption_spec( + request, metadata + ) + pb_request = contact_center_insights.GetEncryptionSpecRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.EncryptionSpec() + pb_resp = resources.EncryptionSpec.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_encryption_spec(resp) + return resp + class _GetIssue(ContactCenterInsightsRestStub): def __hash__(self): return hash("GetIssue") @@ -3426,7 +3745,13 @@ def __call__( Returns: ~.resources.Settings: - The settings resource. + The CCAI Insights project wide settings. Use these + settings to configure the behavior of Insights. View + these settings with + ```getsettings`` `__ + and change the settings with + ```updateSettings`` `__. 
+ """ http_options: List[Dict[str, str]] = [ @@ -3561,6 +3886,101 @@ def __call__( resp = self._interceptor.post_get_view(resp) return resp + class _ImportIssueModel(ContactCenterInsightsRestStub): + def __hash__(self): + return hash("ImportIssueModel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: contact_center_insights.ImportIssueModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the import issue model method over HTTP. + + Args: + request (~.contact_center_insights.ImportIssueModelRequest): + The request object. Request to import an issue model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/issueModels:import", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_import_issue_model( + request, metadata + ) + pb_request = contact_center_insights.ImportIssueModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_issue_model(resp) + return resp + class _IngestConversations(ContactCenterInsightsRestStub): def __hash__(self): return hash("IngestConversations") @@ -3656,6 +4076,105 @@ def __call__( resp = self._interceptor.post_ingest_conversations(resp) return resp + class _InitializeEncryptionSpec(ContactCenterInsightsRestStub): + def __hash__(self): + return hash("InitializeEncryptionSpec") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: contact_center_insights.InitializeEncryptionSpecRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the initialize encryption + spec method over HTTP. + + Args: + request (~.contact_center_insights.InitializeEncryptionSpecRequest): + The request object. The request to initialize a + location-level encryption specification. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{encryption_spec.name=projects/*/locations/*/encryptionSpec}:initialize", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_initialize_encryption_spec( + request, metadata + ) + pb_request = contact_center_insights.InitializeEncryptionSpecRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_initialize_encryption_spec(resp) + return resp + class _ListAnalyses(ContactCenterInsightsRestStub): def __hash__(self): return hash("ListAnalyses") @@ -4685,7 +5204,13 @@ def __call__( Returns: ~.resources.Settings: - The settings resource. + The CCAI Insights project wide settings. Use these + settings to configure the behavior of Insights. 
View + these settings with + ```getsettings`` `__ + and change the settings with + ```updateSettings`` `__. + """ http_options: List[Dict[str, str]] = [ @@ -5093,6 +5618,16 @@ def export_insights_data( # In C++ this would require a dynamic_cast return self._ExportInsightsData(self._session, self._host, self._interceptor) # type: ignore + @property + def export_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ExportIssueModelRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExportIssueModel(self._session, self._host, self._interceptor) # type: ignore + @property def get_analysis( self, @@ -5111,6 +5646,16 @@ def get_conversation( # In C++ this would require a dynamic_cast return self._GetConversation(self._session, self._host, self._interceptor) # type: ignore + @property + def get_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.GetEncryptionSpecRequest], resources.EncryptionSpec + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEncryptionSpec(self._session, self._host, self._interceptor) # type: ignore + @property def get_issue( self, @@ -5153,6 +5698,16 @@ def get_view( # In C++ this would require a dynamic_cast return self._GetView(self._session, self._host, self._interceptor) # type: ignore + @property + def import_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ImportIssueModelRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ImportIssueModel(self._session, self._host, self._interceptor) # type: ignore + @property def ingest_conversations( self, @@ -5163,6 +5718,17 @@ def ingest_conversations( # In C++ this would require a dynamic_cast return self._IngestConversations(self._session, self._host, self._interceptor) # type: ignore + @property + def initialize_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.InitializeEncryptionSpecRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._InitializeEncryptionSpec(self._session, self._host, self._interceptor) # type: ignore + @property def list_analyses( self, diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/__init__.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/__init__.py index be8c70ff77d9..30e7ff857b46 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/__init__.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/__init__.py @@ -45,16 +45,26 @@ ExportInsightsDataMetadata, ExportInsightsDataRequest, ExportInsightsDataResponse, + ExportIssueModelMetadata, + ExportIssueModelRequest, + ExportIssueModelResponse, GetAnalysisRequest, GetConversationRequest, + GetEncryptionSpecRequest, GetIssueModelRequest, GetIssueRequest, GetPhraseMatcherRequest, GetSettingsRequest, GetViewRequest, + ImportIssueModelMetadata, + ImportIssueModelRequest, + ImportIssueModelResponse, IngestConversationsMetadata, IngestConversationsRequest, IngestConversationsResponse, + InitializeEncryptionSpecMetadata, + InitializeEncryptionSpecRequest, + InitializeEncryptionSpecResponse, ListAnalysesRequest, ListAnalysesResponse, 
ListConversationsRequest, @@ -90,11 +100,13 @@ Conversation, ConversationDataSource, ConversationLevelSentiment, + ConversationLevelSilence, ConversationParticipant, ConversationSummarizationSuggestionData, DialogflowIntent, DialogflowInteractionData, DialogflowSource, + EncryptionSpec, Entity, EntityMentionData, ExactMatchConfig, @@ -157,16 +169,26 @@ "ExportInsightsDataMetadata", "ExportInsightsDataRequest", "ExportInsightsDataResponse", + "ExportIssueModelMetadata", + "ExportIssueModelRequest", + "ExportIssueModelResponse", "GetAnalysisRequest", "GetConversationRequest", + "GetEncryptionSpecRequest", "GetIssueModelRequest", "GetIssueRequest", "GetPhraseMatcherRequest", "GetSettingsRequest", "GetViewRequest", + "ImportIssueModelMetadata", + "ImportIssueModelRequest", + "ImportIssueModelResponse", "IngestConversationsMetadata", "IngestConversationsRequest", "IngestConversationsResponse", + "InitializeEncryptionSpecMetadata", + "InitializeEncryptionSpecRequest", + "InitializeEncryptionSpecResponse", "ListAnalysesRequest", "ListAnalysesResponse", "ListConversationsRequest", @@ -201,11 +223,13 @@ "Conversation", "ConversationDataSource", "ConversationLevelSentiment", + "ConversationLevelSilence", "ConversationParticipant", "ConversationSummarizationSuggestionData", "DialogflowIntent", "DialogflowInteractionData", "DialogflowSource", + "EncryptionSpec", "Entity", "EntityMentionData", "ExactMatchConfig", diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py index 60816f4c061f..1229fb10453f 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py @@ -71,6 +71,12 @@ 
"UndeployIssueModelRequest", "UndeployIssueModelResponse", "UndeployIssueModelMetadata", + "ExportIssueModelRequest", + "ExportIssueModelResponse", + "ExportIssueModelMetadata", + "ImportIssueModelRequest", + "ImportIssueModelResponse", + "ImportIssueModelMetadata", "GetIssueRequest", "ListIssuesRequest", "ListIssuesResponse", @@ -86,6 +92,10 @@ "UpdatePhraseMatcherRequest", "GetSettingsRequest", "UpdateSettingsRequest", + "GetEncryptionSpecRequest", + "InitializeEncryptionSpecRequest", + "InitializeEncryptionSpecResponse", + "InitializeEncryptionSpecMetadata", "CreateViewRequest", "GetViewRequest", "ListViewsRequest", @@ -394,7 +404,7 @@ class UploadConversationRequest(proto.Message): class UploadConversationMetadata(proto.Message): - r"""The metadata for an UploadConversation operation. + r"""The metadata for an ``UploadConversation`` operation. Attributes: create_time (google.protobuf.timestamp_pb2.Timestamp): @@ -449,9 +459,9 @@ class ListConversationsRequest(proto.Message): page_size (int): The maximum number of conversations to return in the response. A valid page size ranges from 0 - to 1,000 inclusive. If the page size is zero or - unspecified, a default page size of 100 will be - chosen. Note that a call might return fewer + to 100,000 inclusive. If the page size is zero + or unspecified, a default page size of 100 will + be chosen. Note that a call might return fewer results than the requested page size. page_token (str): The value returned by the last @@ -462,6 +472,23 @@ class ListConversationsRequest(proto.Message): A filter to reduce results to a specific subset. Useful for querying conversations with specific properties. + order_by (str): + Optional. The attribute by which to order conversations in + the response. If empty, conversations will be ordered by + descending creation time. 
Supported values are one of the + following: + + - create_time + - customer_satisfaction_rating + - duration + - latest_analysis + - start_time + - turn_count + + The default sort order is ascending. To specify order, + append ``asc`` or ``desc`` (``create_time desc``). For more + details, see `Google AIPs + Ordering `__. view (google.cloud.contact_center_insights_v1.types.ConversationView): The level of details of the conversation. Default is ``BASIC``. @@ -483,6 +510,10 @@ class ListConversationsRequest(proto.Message): proto.STRING, number=4, ) + order_by: str = proto.Field( + proto.STRING, + number=7, + ) view: "ConversationView" = proto.Field( proto.ENUM, number=5, @@ -549,7 +580,20 @@ class UpdateConversationRequest(proto.Message): Required. The new values for the conversation. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. + The list of fields to be updated. All possible fields can be + updated by passing ``*``, or a subset of the following + updateable fields can be provided: + + - ``agent_id`` + - ``language_code`` + - ``labels`` + - ``metadata`` + - ``quality_metadata`` + - ``call_metadata`` + - ``start_time`` + - ``expire_time`` or ``ttl`` + - ``data_source.gcs_source.audio_uri`` or + ``data_source.dialogflow_source.audio_uri`` """ conversation: resources.Conversation = proto.Field( @@ -619,11 +663,22 @@ class IngestConversationsRequest(proto.Message): Optional. Default Speech-to-Text configuration. Optional, will default to the config specified in Settings. + sample_size (int): + Optional. If set, this fields indicates the + number of objects to ingest from the Cloud + Storage bucket. If empty, the entire bucket will + be ingested. Unless they are first deleted, + conversations produced through sampling won't be + ingested by subsequent ingest requests. + + This field is a member of `oneof`_ ``_sample_size``. """ class GcsSource(proto.Message): r"""Configuration for Cloud Storage bucket sources. + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: bucket_uri (str): Required. The Cloud Storage bucket containing @@ -631,6 +686,22 @@ class GcsSource(proto.Message): bucket_object_type (google.cloud.contact_center_insights_v1.types.IngestConversationsRequest.GcsSource.BucketObjectType): Optional. Specifies the type of the objects in ``bucket_uri``. + metadata_bucket_uri (str): + Optional. The Cloud Storage path to the conversation + metadata. Note that: [1] Metadata files are expected to be + in JSON format. [2] Metadata and source files (transcripts + or audio) must be in separate buckets. [3] A source file and + its corresponding metadata file must share the same name to + be properly ingested, E.g. + ``gs://bucket/audio/conversation1.mp3`` and + ``gs://bucket/metadata/conversation1.json``. + + This field is a member of `oneof`_ ``_metadata_bucket_uri``. + custom_metadata_keys (MutableSequence[str]): + Optional. Custom keys to extract as conversation labels from + metadata files in ``metadata_bucket_uri``. Keys not included + in this field will be ignored. Note that there is a limit of + 20 labels per conversation. """ class BucketObjectType(proto.Enum): @@ -660,6 +731,15 @@ class BucketObjectType(proto.Enum): enum="IngestConversationsRequest.GcsSource.BucketObjectType", ) ) + metadata_bucket_uri: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + custom_metadata_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=12, + ) class TranscriptObjectConfig(proto.Message): r"""Configuration for processing transcript objects. @@ -681,8 +761,10 @@ class ConversationConfig(proto.Message): Attributes: agent_id (str): - An opaque, user-specified string representing - the human agent who handled the conversations. + Optional. An opaque, user-specified string representing a + human agent who handled all conversations in the import. 
+ Note that this will be overridden if per-conversation + metadata is provided through the ``metadata_bucket_uri``. agent_channel (int): Optional. Indicates which of the channels, 1 or 2, contains the agent. Note that this must be @@ -739,6 +821,11 @@ class ConversationConfig(proto.Message): number=6, message=resources.SpeechConfig, ) + sample_size: int = proto.Field( + proto.INT32, + number=7, + optional=True, + ) class IngestConversationsMetadata(proto.Message): @@ -1559,6 +1646,163 @@ class UndeployIssueModelMetadata(proto.Message): ) +class ExportIssueModelRequest(proto.Message): + r"""Request to export an issue model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_destination (google.cloud.contact_center_insights_v1.types.ExportIssueModelRequest.GcsDestination): + Google Cloud Storage URI to export the issue + model to. + + This field is a member of `oneof`_ ``Destination``. + name (str): + Required. The issue model to export. + """ + + class GcsDestination(proto.Message): + r"""Google Cloud Storage Object URI to save the issue model to. + + Attributes: + object_uri (str): + Required. Format: ``gs:///`` + """ + + object_uri: str = proto.Field( + proto.STRING, + number=1, + ) + + gcs_destination: GcsDestination = proto.Field( + proto.MESSAGE, + number=2, + oneof="Destination", + message=GcsDestination, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ExportIssueModelResponse(proto.Message): + r"""Response from export issue model""" + + +class ExportIssueModelMetadata(proto.Message): + r"""Metadata used for export issue model. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation was created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation finished running. + request (google.cloud.contact_center_insights_v1.types.ExportIssueModelRequest): + The original export request. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + request: "ExportIssueModelRequest" = proto.Field( + proto.MESSAGE, + number=3, + message="ExportIssueModelRequest", + ) + + +class ImportIssueModelRequest(proto.Message): + r"""Request to import an issue model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_source (google.cloud.contact_center_insights_v1.types.ImportIssueModelRequest.GcsSource): + Google Cloud Storage source message. + + This field is a member of `oneof`_ ``Source``. + parent (str): + Required. The parent resource of the issue + model. + create_new_model (bool): + Optional. If set to true, will create an + issue model from the imported file with randomly + generated IDs for the issue model and + corresponding issues. Otherwise, replaces an + existing model with the same ID as the file. + """ + + class GcsSource(proto.Message): + r"""Google Cloud Storage Object URI to get the issue model file + from. + + Attributes: + object_uri (str): + Required. Format: ``gs:///`` + """ + + object_uri: str = proto.Field( + proto.STRING, + number=1, + ) + + gcs_source: GcsSource = proto.Field( + proto.MESSAGE, + number=2, + oneof="Source", + message=GcsSource, + ) + parent: str = proto.Field( + proto.STRING, + number=1, + ) + create_new_model: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class ImportIssueModelResponse(proto.Message): + r"""Response from import issue model""" + + +class ImportIssueModelMetadata(proto.Message): + r"""Metadata used for import issue model. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation was created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation finished running. 
+ request (google.cloud.contact_center_insights_v1.types.ImportIssueModelRequest): + The original import request. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + request: "ImportIssueModelRequest" = proto.Field( + proto.MESSAGE, + number=3, + message="ImportIssueModelRequest", + ) + + class GetIssueRequest(proto.Message): r"""The request to get an issue. @@ -1855,6 +2099,90 @@ class UpdateSettingsRequest(proto.Message): ) +class GetEncryptionSpecRequest(proto.Message): + r"""The request to get location-level encryption specification. + + Attributes: + name (str): + Required. The name of the encryption spec + resource to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class InitializeEncryptionSpecRequest(proto.Message): + r"""The request to initialize a location-level encryption + specification. + + Attributes: + encryption_spec (google.cloud.contact_center_insights_v1.types.EncryptionSpec): + Required. The encryption spec used for CMEK encryption. It + is required that the kms key is in the same region as the + endpoint. The same key will be used for all provisioned + resources, if encryption is available. If the kms_key_name + is left empty, no encryption will be enforced. + """ + + encryption_spec: resources.EncryptionSpec = proto.Field( + proto.MESSAGE, + number=1, + message=resources.EncryptionSpec, + ) + + +class InitializeEncryptionSpecResponse(proto.Message): + r"""The response to initialize a location-level encryption + specification. + + """ + + +class InitializeEncryptionSpecMetadata(proto.Message): + r"""Metadata for initializing a location-level encryption + specification. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. 
+ end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + request (google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecRequest): + Output only. The original request for + initialization. + partial_errors (MutableSequence[google.rpc.status_pb2.Status]): + Partial errors during initialising operation + that might cause the operation output to be + incomplete. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + request: "InitializeEncryptionSpecRequest" = proto.Field( + proto.MESSAGE, + number=3, + message="InitializeEncryptionSpecRequest", + ) + partial_errors: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + + class CreateViewRequest(proto.Message): r"""The request to create a view. 
diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py index 0302667ebc6e..83de2aec7b7e 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py @@ -32,6 +32,7 @@ "AnalysisResult", "IssueModelResult", "ConversationLevelSentiment", + "ConversationLevelSilence", "IssueAssignment", "CallAnnotation", "AnnotationBoundary", @@ -55,6 +56,7 @@ "PhraseMatchRuleConfig", "ExactMatchConfig", "Settings", + "EncryptionSpec", "RedactionConfig", "SpeechConfig", "RuntimeAnnotation", @@ -129,6 +131,11 @@ class Conversation(proto.Message): quality_metadata (google.cloud.contact_center_insights_v1.types.Conversation.QualityMetadata): Conversation metadata related to quality management. + metadata_json (str): + Input only. JSON Metadata encoded as a + string. This field is primarily used by Insights + integrations with various telphony systems and + must be in one of Insights' supported formats. transcript (google.cloud.contact_center_insights_v1.types.Conversation.Transcript): Output only. The conversation transcript. medium (google.cloud.contact_center_insights_v1.types.Conversation.Medium): @@ -482,6 +489,10 @@ class DialogflowSegmentMetadata(proto.Message): number=24, message=QualityMetadata, ) + metadata_json: str = proto.Field( + proto.STRING, + number=25, + ) transcript: Transcript = proto.Field( proto.MESSAGE, number=8, @@ -691,6 +702,9 @@ class CallAnalysisMetadata(proto.Message): sentiments (MutableSequence[google.cloud.contact_center_insights_v1.types.ConversationLevelSentiment]): Overall conversation-level sentiment for each channel of the call. 
+ silence (google.cloud.contact_center_insights_v1.types.ConversationLevelSilence): + Overall conversation-level silence during the + call. intents (MutableMapping[str, google.cloud.contact_center_insights_v1.types.Intent]): All the matched intents in the call. phrase_matchers (MutableMapping[str, google.cloud.contact_center_insights_v1.types.PhraseMatchData]): @@ -716,6 +730,11 @@ class CallAnalysisMetadata(proto.Message): number=4, message="ConversationLevelSentiment", ) + silence: "ConversationLevelSilence" = proto.Field( + proto.MESSAGE, + number=11, + message="ConversationLevelSilence", + ) intents: MutableMapping[str, "Intent"] = proto.MapField( proto.STRING, proto.MESSAGE, @@ -791,6 +810,28 @@ class ConversationLevelSentiment(proto.Message): ) +class ConversationLevelSilence(proto.Message): + r"""Conversation-level silence data. + + Attributes: + silence_duration (google.protobuf.duration_pb2.Duration): + Amount of time calculated to be in silence. + silence_percentage (float): + Percentage of the total conversation spent in + silence. + """ + + silence_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + silence_percentage: float = proto.Field( + proto.FLOAT, + number=2, + ) + + class IssueAssignment(proto.Message): r"""Information about the issue. @@ -1456,6 +1497,8 @@ class Issue(proto.Message): Output only. Resource names of the sample representative utterances that match to this issue. + display_description (str): + Representative description of the issue. """ name: str = proto.Field( @@ -1480,6 +1523,10 @@ class Issue(proto.Message): proto.STRING, number=6, ) + display_description: str = proto.Field( + proto.STRING, + number=14, + ) class IssueModelLabelStats(proto.Message): @@ -1764,7 +1811,11 @@ class ExactMatchConfig(proto.Message): class Settings(proto.Message): - r"""The settings resource. + r"""The CCAI Insights project wide settings. 
Use these settings to + configure the behavior of Insights. View these settings with + ```getsettings`` `__ + and change the settings with + ```updateSettings`` `__. Attributes: name (str): @@ -1807,21 +1858,30 @@ class Settings(proto.Message): created. - "export-insights-data": Notify each time an export is complete. + - "ingest-conversations": Notify each time an + IngestConversations LRO is complete. - "update-conversation": Notify each time a conversation is updated via UpdateConversation. + - "upload-conversation": Notify when an UploadConversation + LRO is complete. Values are Pub/Sub topics. The format of each Pub/Sub topic is: projects/{project}/topics/{topic} analysis_config (google.cloud.contact_center_insights_v1.types.Settings.AnalysisConfig): Default analysis settings. redaction_config (google.cloud.contact_center_insights_v1.types.RedactionConfig): - Default DLP redaction resources to be applied - while ingesting conversations. + Default DLP redaction resources to be applied while + ingesting conversations. This applies to conversations + ingested from the ``UploadConversation`` and + ``IngestConversations`` endpoints, including conversations + coming from CCAI Platform. speech_config (google.cloud.contact_center_insights_v1.types.SpeechConfig): - Optional. Default Speech-to-Text resources to - be used while ingesting audio files. Optional, - CCAI Insights will create a default if not - provided. + Optional. Default Speech-to-Text resources to use while + ingesting audio files. Optional, CCAI Insights will create a + default if not provided. This applies to conversations + ingested from the ``UploadConversation`` and + ``IngestConversations`` endpoints, including conversations + coming from CCAI Platform. """ class AnalysisConfig(proto.Message): @@ -1900,9 +1960,44 @@ class AnalysisConfig(proto.Message): ) +class EncryptionSpec(proto.Message): + r"""A customer-managed encryption key specification that can be + applied to all created resources (e.g. 
Conversation). + + Attributes: + name (str): + Immutable. The resource name of the + encryption key specification resource. Format: + + projects/{project}/locations/{location}/encryptionSpec + kms_key (str): + Required. The name of customer-managed encryption key that + is used to secure a resource and its sub-resources. If + empty, the resource is secured by the default Google + encryption key. Only the key in the same location as this + resource is allowed to be used for encryption. Format: + ``projects/{project}/locations/{location}/keyRings/{keyRing}/cryptoKeys/{key}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + kms_key: str = proto.Field( + proto.STRING, + number=2, + ) + + class RedactionConfig(proto.Message): - r"""DLP resources used for redaction while ingesting - conversations. + r"""DLP resources used for redaction while ingesting conversations. DLP + settings are applied to conversations ingested from the + ``UploadConversation`` and ``IngestConversations`` endpoints, + including conversation coming from CCAI Platform. They are not + applied to conversations ingested from the ``CreateConversation`` + endpoint or the Dialogflow / Agent Assist runtime integrations. When + using Dialogflow / Agent Assist runtime integrations, redaction + should be performed in Dialogflow / Agent Assist. Attributes: deidentify_template (str): @@ -1926,7 +2021,11 @@ class RedactionConfig(proto.Message): class SpeechConfig(proto.Message): - r"""Speech-to-Text configuration. + r"""Speech-to-Text configuration. Speech-to-Text settings are applied to + conversations ingested from the ``UploadConversation`` and + ``IngestConversations`` endpoints, including conversation coming + from CCAI Platform. They are not applied to conversations ingested + from the ``CreateConversation`` endpoint. 
Attributes: speech_recognizer (str): @@ -1991,8 +2090,56 @@ class RuntimeAnnotation(proto.Message): answer_feedback (google.cloud.contact_center_insights_v1.types.AnswerFeedback): The feedback that the customer has about the answer in ``data``. + user_input (google.cloud.contact_center_insights_v1.types.RuntimeAnnotation.UserInput): + Explicit input used for generating the answer """ + class UserInput(proto.Message): + r"""Explicit input used for generating the answer + + Attributes: + query (str): + Query text. Article Search uses this to store + the input query used to generate the search + results. + generator_name (str): + The resource name of associated generator. Format: + ``projects//locations//generators/`` + query_source (google.cloud.contact_center_insights_v1.types.RuntimeAnnotation.UserInput.QuerySource): + Query source for the answer. + """ + + class QuerySource(proto.Enum): + r"""The source of the query. + + Values: + QUERY_SOURCE_UNSPECIFIED (0): + Unknown query source. + AGENT_QUERY (1): + The query is from agents. + SUGGESTED_QUERY (2): + The query is a query from previous + suggestions, e.g. from a preceding + SuggestKnowledgeAssist response. + """ + QUERY_SOURCE_UNSPECIFIED = 0 + AGENT_QUERY = 1 + SUGGESTED_QUERY = 2 + + query: str = proto.Field( + proto.STRING, + number=1, + ) + generator_name: str = proto.Field( + proto.STRING, + number=2, + ) + query_source: "RuntimeAnnotation.UserInput.QuerySource" = proto.Field( + proto.ENUM, + number=3, + enum="RuntimeAnnotation.UserInput.QuerySource", + ) + article_suggestion: "ArticleSuggestionData" = proto.Field( proto.MESSAGE, number=6, @@ -2055,6 +2202,11 @@ class RuntimeAnnotation(proto.Message): number=5, message="AnswerFeedback", ) + user_input: UserInput = proto.Field( + proto.MESSAGE, + number=16, + message=UserInput, + ) class AnswerFeedback(proto.Message): @@ -2566,9 +2718,12 @@ class SummarizationModel(proto.Enum): Unspecified summarization model. BASELINE_MODEL (1): The CCAI baseline model. 
+ BASELINE_MODEL_V2_0 (2): + The CCAI baseline model, V2.0. """ SUMMARIZATION_MODEL_UNSPECIFIED = 0 BASELINE_MODEL = 1 + BASELINE_MODEL_V2_0 = 2 conversation_profile: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_async.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_async.py new file mode 100644 index 000000000000..ec4c508ac2b3 --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportIssueModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_ExportIssueModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +async def sample_export_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + gcs_destination = contact_center_insights_v1.GcsDestination() + gcs_destination.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ExportIssueModelRequest( + gcs_destination=gcs_destination, + name="name_value", + ) + + # Make the request + operation = client.export_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_ExportIssueModel_async] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_sync.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_sync.py new file mode 100644 index 000000000000..b9fa9152f794 --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportIssueModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_ExportIssueModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +def sample_export_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + gcs_destination = contact_center_insights_v1.GcsDestination() + gcs_destination.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ExportIssueModelRequest( + gcs_destination=gcs_destination, + name="name_value", + ) + + # Make the request + operation = client.export_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_ExportIssueModel_sync] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_async.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_async.py new file mode 100644 index 000000000000..9217d1773b5f --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEncryptionSpec +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_GetEncryptionSpec_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +async def sample_get_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + request = contact_center_insights_v1.GetEncryptionSpecRequest( + name="name_value", + ) + + # Make the request + response = await client.get_encryption_spec(request=request) + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_GetEncryptionSpec_async] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_sync.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_sync.py new file mode 100644 index 000000000000..90f3743e6570 --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetEncryptionSpec +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_GetEncryptionSpec_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +def sample_get_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + request = contact_center_insights_v1.GetEncryptionSpecRequest( + name="name_value", + ) + + # Make the request + response = client.get_encryption_spec(request=request) + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_GetEncryptionSpec_sync] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_async.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_async.py new file mode 100644 index 000000000000..e90dd84dc650 --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the 
Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportIssueModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_ImportIssueModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +async def sample_import_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + gcs_source = contact_center_insights_v1.GcsSource() + gcs_source.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ImportIssueModelRequest( + gcs_source=gcs_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_ImportIssueModel_async] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_sync.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_sync.py new file mode 100644 index 000000000000..74ee99de39bc --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportIssueModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_ImportIssueModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +def sample_import_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + gcs_source = contact_center_insights_v1.GcsSource() + gcs_source.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ImportIssueModelRequest( + gcs_source=gcs_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_ImportIssueModel_sync] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_async.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_async.py new file mode 100644 index 000000000000..bc5767e0a1c1 --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InitializeEncryptionSpec +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_InitializeEncryptionSpec_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +async def sample_initialize_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + encryption_spec = contact_center_insights_v1.EncryptionSpec() + encryption_spec.kms_key = "kms_key_value" + + request = contact_center_insights_v1.InitializeEncryptionSpecRequest( + encryption_spec=encryption_spec, + ) + + # Make the request + operation = client.initialize_encryption_spec(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_InitializeEncryptionSpec_async] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_sync.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_sync.py new file mode 100644 index 000000000000..26a058ab9ae1 --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InitializeEncryptionSpec +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_InitializeEncryptionSpec_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +def sample_initialize_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + encryption_spec = contact_center_insights_v1.EncryptionSpec() + encryption_spec.kms_key = "kms_key_value" + + request = contact_center_insights_v1.InitializeEncryptionSpecRequest( + encryption_spec=encryption_spec, + ) + + # Make the request + operation = client.initialize_encryption_spec(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_InitializeEncryptionSpec_sync] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json b/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json index bb09ad8db4c2..ab3be662543b 100644 --- a/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json @@ -2798,19 +2798,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_analysis", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.export_issue_model", "method": { - "fullName": 
"google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetAnalysis", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.ExportIssueModel", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetAnalysis" + "shortName": "ExportIssueModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetAnalysisRequest" + "type": "google.cloud.contact_center_insights_v1.types.ExportIssueModelRequest" }, { "name": "name", @@ -2829,22 +2829,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Analysis", - "shortName": "get_analysis" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_issue_model" }, - "description": "Sample for GetAnalysis", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_async.py", + "description": "Sample for ExportIssueModel", + "file": "contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetAnalysis_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_ExportIssueModel_async", "segments": [ { - "end": 51, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 59, "start": 27, "type": "SHORT" }, @@ -2854,22 +2854,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_async.py" + "title": 
"contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_async.py" }, { "canonical": true, @@ -2878,19 +2878,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_analysis", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.export_issue_model", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetAnalysis", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.ExportIssueModel", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetAnalysis" + "shortName": "ExportIssueModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetAnalysisRequest" + "type": "google.cloud.contact_center_insights_v1.types.ExportIssueModelRequest" }, { "name": "name", @@ -2909,22 +2909,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Analysis", - "shortName": "get_analysis" + "resultType": "google.api_core.operation.Operation", + "shortName": "export_issue_model" }, - "description": "Sample for GetAnalysis", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_sync.py", + "description": "Sample for ExportIssueModel", + "file": "contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetAnalysis_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_ExportIssueModel_sync", "segments": [ { - "end": 51, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 59, "start": 27, "type": "SHORT" }, @@ 
-2934,22 +2934,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_sync.py" }, { "canonical": true, @@ -2959,19 +2959,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_conversation", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_analysis", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetConversation", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetAnalysis", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetConversation" + "shortName": "GetAnalysis" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetConversationRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetAnalysisRequest" }, { "name": "name", @@ -2990,14 +2990,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Conversation", - "shortName": "get_conversation" + "resultType": "google.cloud.contact_center_insights_v1.types.Analysis", + "shortName": "get_analysis" }, - "description": "Sample for GetConversation", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_async.py", + "description": "Sample for GetAnalysis", + "file": 
"contactcenterinsights_v1_generated_contact_center_insights_get_analysis_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetConversation_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetAnalysis_async", "segments": [ { "end": 51, @@ -3030,7 +3030,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_async.py" }, { "canonical": true, @@ -3039,19 +3039,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_conversation", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_analysis", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetConversation", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetAnalysis", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetConversation" + "shortName": "GetAnalysis" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetConversationRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetAnalysisRequest" }, { "name": "name", @@ -3070,14 +3070,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Conversation", - "shortName": "get_conversation" + "resultType": "google.cloud.contact_center_insights_v1.types.Analysis", + "shortName": "get_analysis" }, - "description": "Sample for GetConversation", - "file": 
"contactcenterinsights_v1_generated_contact_center_insights_get_conversation_sync.py", + "description": "Sample for GetAnalysis", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetConversation_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetAnalysis_sync", "segments": [ { "end": 51, @@ -3110,7 +3110,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_sync.py" }, { "canonical": true, @@ -3120,19 +3120,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_issue_model", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_conversation", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssueModel", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetConversation", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetIssueModel" + "shortName": "GetConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetIssueModelRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetConversationRequest" }, { "name": "name", @@ -3151,14 +3151,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.IssueModel", - "shortName": "get_issue_model" + "resultType": "google.cloud.contact_center_insights_v1.types.Conversation", + 
"shortName": "get_conversation" }, - "description": "Sample for GetIssueModel", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_async.py", + "description": "Sample for GetConversation", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssueModel_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetConversation_async", "segments": [ { "end": 51, @@ -3191,7 +3191,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_async.py" }, { "canonical": true, @@ -3200,19 +3200,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_issue_model", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_conversation", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssueModel", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetConversation", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetIssueModel" + "shortName": "GetConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetIssueModelRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetConversationRequest" }, { "name": "name", @@ -3231,14 +3231,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.IssueModel", - "shortName": 
"get_issue_model" + "resultType": "google.cloud.contact_center_insights_v1.types.Conversation", + "shortName": "get_conversation" }, - "description": "Sample for GetIssueModel", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_sync.py", + "description": "Sample for GetConversation", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssueModel_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetConversation_sync", "segments": [ { "end": 51, @@ -3271,7 +3271,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_sync.py" }, { "canonical": true, @@ -3281,19 +3281,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_issue", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_encryption_spec", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssue", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetEncryptionSpec", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetIssue" + "shortName": "GetEncryptionSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetIssueRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetEncryptionSpecRequest" }, { "name": "name", @@ -3312,14 +3312,14 @@ "type": "Sequence[Tuple[str, str]" } ], - 
"resultType": "google.cloud.contact_center_insights_v1.types.Issue", - "shortName": "get_issue" + "resultType": "google.cloud.contact_center_insights_v1.types.EncryptionSpec", + "shortName": "get_encryption_spec" }, - "description": "Sample for GetIssue", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_async.py", + "description": "Sample for GetEncryptionSpec", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssue_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetEncryptionSpec_async", "segments": [ { "end": 51, @@ -3352,7 +3352,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_async.py" }, { "canonical": true, @@ -3361,19 +3361,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_issue", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_encryption_spec", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssue", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetEncryptionSpec", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetIssue" + "shortName": "GetEncryptionSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetIssueRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetEncryptionSpecRequest" }, { "name": "name", @@ -3392,14 
+3392,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Issue", - "shortName": "get_issue" + "resultType": "google.cloud.contact_center_insights_v1.types.EncryptionSpec", + "shortName": "get_encryption_spec" }, - "description": "Sample for GetIssue", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_sync.py", + "description": "Sample for GetEncryptionSpec", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssue_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetEncryptionSpec_sync", "segments": [ { "end": 51, @@ -3432,7 +3432,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_sync.py" }, { "canonical": true, @@ -3442,19 +3442,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_phrase_matcher", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_issue_model", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetPhraseMatcher", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssueModel", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetPhraseMatcher" + "shortName": "GetIssueModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetPhraseMatcherRequest" + "type": 
"google.cloud.contact_center_insights_v1.types.GetIssueModelRequest" }, { "name": "name", @@ -3473,14 +3473,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.PhraseMatcher", - "shortName": "get_phrase_matcher" + "resultType": "google.cloud.contact_center_insights_v1.types.IssueModel", + "shortName": "get_issue_model" }, - "description": "Sample for GetPhraseMatcher", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_async.py", + "description": "Sample for GetIssueModel", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetPhraseMatcher_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssueModel_async", "segments": [ { "end": 51, @@ -3513,7 +3513,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_async.py" }, { "canonical": true, @@ -3522,19 +3522,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_phrase_matcher", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_issue_model", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetPhraseMatcher", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssueModel", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetPhraseMatcher" + "shortName": "GetIssueModel" }, "parameters": [ { "name": 
"request", - "type": "google.cloud.contact_center_insights_v1.types.GetPhraseMatcherRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetIssueModelRequest" }, { "name": "name", @@ -3553,14 +3553,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.PhraseMatcher", - "shortName": "get_phrase_matcher" + "resultType": "google.cloud.contact_center_insights_v1.types.IssueModel", + "shortName": "get_issue_model" }, - "description": "Sample for GetPhraseMatcher", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_sync.py", + "description": "Sample for GetIssueModel", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetPhraseMatcher_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssueModel_sync", "segments": [ { "end": 51, @@ -3593,7 +3593,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_sync.py" }, { "canonical": true, @@ -3603,19 +3603,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_settings", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_issue", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetSettings", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssue", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - 
"shortName": "GetSettings" + "shortName": "GetIssue" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetSettingsRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetIssueRequest" }, { "name": "name", @@ -3634,14 +3634,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Settings", - "shortName": "get_settings" + "resultType": "google.cloud.contact_center_insights_v1.types.Issue", + "shortName": "get_issue" }, - "description": "Sample for GetSettings", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_async.py", + "description": "Sample for GetIssue", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetSettings_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssue_async", "segments": [ { "end": 51, @@ -3674,7 +3674,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_async.py" }, { "canonical": true, @@ -3683,19 +3683,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_settings", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_issue", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetSettings", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssue", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - 
"shortName": "GetSettings" + "shortName": "GetIssue" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetSettingsRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetIssueRequest" }, { "name": "name", @@ -3714,14 +3714,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Settings", - "shortName": "get_settings" + "resultType": "google.cloud.contact_center_insights_v1.types.Issue", + "shortName": "get_issue" }, - "description": "Sample for GetSettings", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_sync.py", + "description": "Sample for GetIssue", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetSettings_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssue_sync", "segments": [ { "end": 51, @@ -3754,7 +3754,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_sync.py" }, { "canonical": true, @@ -3764,19 +3764,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_view", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_phrase_matcher", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetView", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetPhraseMatcher", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": 
"ContactCenterInsights" }, - "shortName": "GetView" + "shortName": "GetPhraseMatcher" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetViewRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetPhraseMatcherRequest" }, { "name": "name", @@ -3795,14 +3795,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.View", - "shortName": "get_view" + "resultType": "google.cloud.contact_center_insights_v1.types.PhraseMatcher", + "shortName": "get_phrase_matcher" }, - "description": "Sample for GetView", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_view_async.py", + "description": "Sample for GetPhraseMatcher", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetView_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetPhraseMatcher_async", "segments": [ { "end": 51, @@ -3835,7 +3835,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_view_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_async.py" }, { "canonical": true, @@ -3844,14 +3844,256 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_view", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_phrase_matcher", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetView", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetPhraseMatcher", "service": { "fullName": 
"google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetView" + "shortName": "GetPhraseMatcher" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.GetPhraseMatcherRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.contact_center_insights_v1.types.PhraseMatcher", + "shortName": "get_phrase_matcher" + }, + "description": "Sample for GetPhraseMatcher", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetPhraseMatcher_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", + "shortName": "ContactCenterInsightsAsyncClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_settings", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetSettings", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": 
"ContactCenterInsights" + }, + "shortName": "GetSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.GetSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.contact_center_insights_v1.types.Settings", + "shortName": "get_settings" + }, + "description": "Sample for GetSettings", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetSettings_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", + "shortName": "ContactCenterInsightsClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_settings", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetSettings", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "GetSettings" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.contact_center_insights_v1.types.GetSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.contact_center_insights_v1.types.Settings", + "shortName": "get_settings" + }, + "description": "Sample for GetSettings", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetSettings_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", + "shortName": "ContactCenterInsightsAsyncClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_view", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetView", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "GetView" }, "parameters": [ { @@ -3859,7 +4101,168 @@ "type": "google.cloud.contact_center_insights_v1.types.GetViewRequest" }, { - "name": "name", + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.contact_center_insights_v1.types.View", + "shortName": "get_view" + }, + "description": "Sample for GetView", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_view_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetView_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_view_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", + "shortName": "ContactCenterInsightsClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_view", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetView", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "GetView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.GetViewRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.contact_center_insights_v1.types.View", + "shortName": "get_view" + }, + "description": "Sample for GetView", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetView_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_view_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", + "shortName": "ContactCenterInsightsAsyncClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.import_issue_model", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.ImportIssueModel", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "ImportIssueModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.ImportIssueModelRequest" + }, + { + "name": "parent", "type": "str" }, { @@ -3875,22 +4278,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.View", - "shortName": "get_view" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_issue_model" }, - "description": "Sample for GetView", - "file": 
"contactcenterinsights_v1_generated_contact_center_insights_get_view_sync.py", + "description": "Sample for ImportIssueModel", + "file": "contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetView_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_ImportIssueModel_async", "segments": [ { - "end": 51, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 59, "start": 27, "type": "SHORT" }, @@ -3900,22 +4303,102 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_view_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", + "shortName": "ContactCenterInsightsClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.import_issue_model", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.ImportIssueModel", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "ImportIssueModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.ImportIssueModelRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_issue_model" + }, + "description": "Sample for ImportIssueModel", + "file": "contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_ImportIssueModel_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_sync.py" }, { "canonical": true, @@ -4078,6 +4561,167 @@ ], "title": "contactcenterinsights_v1_generated_contact_center_insights_ingest_conversations_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", + "shortName": "ContactCenterInsightsAsyncClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.initialize_encryption_spec", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.InitializeEncryptionSpec", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "InitializeEncryptionSpec" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecRequest" + }, + { + "name": "encryption_spec", + "type": "google.cloud.contact_center_insights_v1.types.EncryptionSpec" + }, + { 
+ "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "initialize_encryption_spec" + }, + "description": "Sample for InitializeEncryptionSpec", + "file": "contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_InitializeEncryptionSpec_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", + "shortName": "ContactCenterInsightsClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.initialize_encryption_spec", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.InitializeEncryptionSpec", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "InitializeEncryptionSpec" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecRequest" + }, + { + "name": "encryption_spec", + "type": "google.cloud.contact_center_insights_v1.types.EncryptionSpec" + }, + { 
+ "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "initialize_encryption_spec" + }, + "description": "Sample for InitializeEncryptionSpec", + "file": "contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_InitializeEncryptionSpec_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-contact-center-insights/scripts/fixup_contact_center_insights_v1_keywords.py b/packages/google-cloud-contact-center-insights/scripts/fixup_contact_center_insights_v1_keywords.py index ca15d27e1fd8..10d05e634fa6 100644 --- a/packages/google-cloud-contact-center-insights/scripts/fixup_contact_center_insights_v1_keywords.py +++ b/packages/google-cloud-contact-center-insights/scripts/fixup_contact_center_insights_v1_keywords.py @@ -56,16 +56,20 @@ class contact_center_insightsCallTransformer(cst.CSTTransformer): 'delete_view': ('name', ), 'deploy_issue_model': ('name', ), 'export_insights_data': ('parent', 'big_query_destination', 'filter', 'kms_key', 'write_disposition', ), + 'export_issue_model': ('name', 'gcs_destination', ), 'get_analysis': ('name', ), 'get_conversation': ('name', 
'view', ), + 'get_encryption_spec': ('name', ), 'get_issue': ('name', ), 'get_issue_model': ('name', ), 'get_phrase_matcher': ('name', ), 'get_settings': ('name', ), 'get_view': ('name', ), - 'ingest_conversations': ('parent', 'gcs_source', 'transcript_object_config', 'conversation_config', 'redaction_config', 'speech_config', ), + 'import_issue_model': ('parent', 'gcs_source', 'create_new_model', ), + 'ingest_conversations': ('parent', 'gcs_source', 'transcript_object_config', 'conversation_config', 'redaction_config', 'speech_config', 'sample_size', ), + 'initialize_encryption_spec': ('encryption_spec', ), 'list_analyses': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_conversations': ('parent', 'page_size', 'page_token', 'filter', 'view', ), + 'list_conversations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'view', ), 'list_issue_models': ('parent', ), 'list_issues': ('parent', ), 'list_phrase_matchers': ('parent', 'page_size', 'page_token', 'filter', ), diff --git a/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py b/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py index 962ffb010308..034282057f4a 100644 --- a/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py +++ b/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py @@ -1230,6 +1230,7 @@ def test_create_conversation(request_type, transport: str = "grpc"): name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -1247,6 +1248,7 @@ def test_create_conversation(request_type, transport: str = "grpc"): assert response.name == 
"name_value" assert response.language_code == "language_code_value" assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" assert response.medium == resources.Conversation.Medium.PHONE_CALL assert response.turn_count == 1105 assert response.obfuscated_user_id == "obfuscated_user_id_value" @@ -1363,6 +1365,7 @@ async def test_create_conversation_empty_call_async(): name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -1440,6 +1443,7 @@ async def test_create_conversation_async( name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -1458,6 +1462,7 @@ async def test_create_conversation_async( assert response.name == "name_value" assert response.language_code == "language_code_value" assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" assert response.medium == resources.Conversation.Medium.PHONE_CALL assert response.turn_count == 1105 assert response.obfuscated_user_id == "obfuscated_user_id_value" @@ -1983,6 +1988,7 @@ def test_update_conversation(request_type, transport: str = "grpc"): name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -2000,6 +2006,7 @@ def test_update_conversation(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.language_code == "language_code_value" assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" assert 
response.medium == resources.Conversation.Medium.PHONE_CALL assert response.turn_count == 1105 assert response.obfuscated_user_id == "obfuscated_user_id_value" @@ -2110,6 +2117,7 @@ async def test_update_conversation_empty_call_async(): name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -2187,6 +2195,7 @@ async def test_update_conversation_async( name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -2205,6 +2214,7 @@ async def test_update_conversation_async( assert response.name == "name_value" assert response.language_code == "language_code_value" assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" assert response.medium == resources.Conversation.Medium.PHONE_CALL assert response.turn_count == 1105 assert response.obfuscated_user_id == "obfuscated_user_id_value" @@ -2412,6 +2422,7 @@ def test_get_conversation(request_type, transport: str = "grpc"): name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -2429,6 +2440,7 @@ def test_get_conversation(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.language_code == "language_code_value" assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" assert response.medium == resources.Conversation.Medium.PHONE_CALL assert response.turn_count == 1105 assert response.obfuscated_user_id == "obfuscated_user_id_value" @@ -2535,6 +2547,7 @@ async def 
test_get_conversation_empty_call_async(): name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -2610,6 +2623,7 @@ async def test_get_conversation_async( name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -2628,6 +2642,7 @@ async def test_get_conversation_async( assert response.name == "name_value" assert response.language_code == "language_code_value" assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" assert response.medium == resources.Conversation.Medium.PHONE_CALL assert response.turn_count == 1105 assert response.obfuscated_user_id == "obfuscated_user_id_value" @@ -2855,6 +2870,7 @@ def test_list_conversations_non_empty_request_with_auto_populated_field(): parent="parent_value", page_token="page_token_value", filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2871,6 +2887,7 @@ def test_list_conversations_non_empty_request_with_auto_populated_field(): parent="parent_value", page_token="page_token_value", filter="filter_value", + order_by="order_by_value", ) @@ -9762,11 +9779,11 @@ async def test_undeploy_issue_model_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetIssueRequest, + contact_center_insights.ExportIssueModelRequest, dict, ], ) -def test_get_issue(request_type, transport: str = "grpc"): +def test_export_issue_model(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9777,29 +9794,24 @@ def test_get_issue(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.Issue( - name="name_value", - display_name="display_name_value", - sample_utterances=["sample_utterances_value"], - ) - response = client.get_issue(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetIssueRequest() + request = contact_center_insights.ExportIssueModelRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Issue) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.sample_utterances == ["sample_utterances_value"] + assert isinstance(response, future.Future) -def test_get_issue_empty_call(): +def test_export_issue_model_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -9808,17 +9820,19 @@ def test_get_issue_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_issue() + client.export_issue_model() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetIssueRequest() + assert args[0] == contact_center_insights.ExportIssueModelRequest() -def test_get_issue_non_empty_request_with_auto_populated_field(): +def test_export_issue_model_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -9829,24 +9843,26 @@ def test_get_issue_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.GetIssueRequest( + request = contact_center_insights.ExportIssueModelRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_issue(request=request) + client.export_issue_model(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetIssueRequest( + assert args[0] == contact_center_insights.ExportIssueModelRequest( name="name_value", ) -def test_get_issue_use_cached_wrapped_rpc(): +def test_export_issue_model_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9860,21 +9876,30 @@ def test_get_issue_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_issue in client._transport._wrapped_methods + assert ( + client._transport.export_issue_model in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_issue] = mock_rpc + client._transport._wrapped_methods[ + client._transport.export_issue_model + ] = mock_rpc request = {} - client.get_issue(request) + client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_issue(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9882,7 +9907,7 @@ def test_get_issue_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_issue_empty_call_async(): +async def test_export_issue_model_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -9891,23 +9916,23 @@ async def test_get_issue_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Issue( - name="name_value", - display_name="display_name_value", - sample_utterances=["sample_utterances_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_issue() + response = await client.export_issue_model() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetIssueRequest() + assert args[0] == contact_center_insights.ExportIssueModelRequest() @pytest.mark.asyncio -async def test_get_issue_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_export_issue_model_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -9922,7 +9947,7 @@ async def test_get_issue_async_use_cached_wrapped_rpc(transport: str = "grpc_asy # Ensure method has 
been cached assert ( - client._client._transport.get_issue + client._client._transport.export_issue_model in client._client._transport._wrapped_methods ) @@ -9930,16 +9955,21 @@ async def test_get_issue_async_use_cached_wrapped_rpc(transport: str = "grpc_asy mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_issue + client._client._transport.export_issue_model ] = mock_rpc request = {} - await client.get_issue(request) + await client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_issue(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.export_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9947,9 +9977,9 @@ async def test_get_issue_async_use_cached_wrapped_rpc(transport: str = "grpc_asy @pytest.mark.asyncio -async def test_get_issue_async( +async def test_export_issue_model_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.GetIssueRequest, + request_type=contact_center_insights.ExportIssueModelRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9961,50 +9991,47 @@ async def test_get_issue_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Issue( - name="name_value", - display_name="display_name_value", - sample_utterances=["sample_utterances_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_issue(request) + response = await client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetIssueRequest() + request = contact_center_insights.ExportIssueModelRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.Issue) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.sample_utterances == ["sample_utterances_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_issue_async_from_dict(): - await test_get_issue_async(request_type=dict) +async def test_export_issue_model_async_from_dict(): + await test_export_issue_model_async(request_type=dict) -def test_get_issue_field_headers(): +def test_export_issue_model_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetIssueRequest() + request = contact_center_insights.ExportIssueModelRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_issue), "__call__") as call: - call.return_value = resources.Issue() - client.get_issue(request) + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10020,21 +10047,25 @@ def test_get_issue_field_headers(): @pytest.mark.asyncio -async def test_get_issue_field_headers_async(): +async def test_export_issue_model_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetIssueRequest() + request = contact_center_insights.ExportIssueModelRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue()) - await client.get_issue(request) + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10049,18 +10080,20 @@ async def test_get_issue_field_headers_async(): ) in kw["metadata"] -def test_get_issue_flattened(): +def test_export_issue_model_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.Issue() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_issue( + client.export_issue_model( name="name_value", ) @@ -10073,7 +10106,7 @@ def test_get_issue_flattened(): assert arg == mock_val -def test_get_issue_flattened_error(): +def test_export_issue_model_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10081,27 +10114,31 @@ def test_get_issue_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_issue( - contact_center_insights.GetIssueRequest(), + client.export_issue_model( + contact_center_insights.ExportIssueModelRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_issue_flattened_async(): +async def test_export_issue_model_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.Issue() + call.return_value = operations_pb2.Operation(name="operations/op") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_issue( + response = await client.export_issue_model( name="name_value", ) @@ -10115,7 +10152,7 @@ async def test_get_issue_flattened_async(): @pytest.mark.asyncio -async def test_get_issue_flattened_error_async(): +async def test_export_issue_model_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10123,8 +10160,8 @@ async def test_get_issue_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_issue( - contact_center_insights.GetIssueRequest(), + await client.export_issue_model( + contact_center_insights.ExportIssueModelRequest(), name="name_value", ) @@ -10132,11 +10169,11 @@ async def test_get_issue_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListIssuesRequest, + contact_center_insights.ImportIssueModelRequest, dict, ], ) -def test_list_issues(request_type, transport: str = "grpc"): +def test_import_issue_model(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10147,22 +10184,24 @@ def test_list_issues(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListIssuesResponse() - response = client.list_issues(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.ListIssuesRequest() + request = contact_center_insights.ImportIssueModelRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, contact_center_insights.ListIssuesResponse) + assert isinstance(response, future.Future) -def test_list_issues_empty_call(): +def test_import_issue_model_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -10171,17 +10210,19 @@ def test_list_issues_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_issues() + client.import_issue_model() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListIssuesRequest() + assert args[0] == contact_center_insights.ImportIssueModelRequest() -def test_list_issues_non_empty_request_with_auto_populated_field(): +def test_import_issue_model_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -10192,24 +10233,26 @@ def test_list_issues_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.ListIssuesRequest( + request = contact_center_insights.ImportIssueModelRequest( parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_issues(request=request) + client.import_issue_model(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListIssuesRequest( + assert args[0] == contact_center_insights.ImportIssueModelRequest( parent="parent_value", ) -def test_list_issues_use_cached_wrapped_rpc(): +def test_import_issue_model_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10223,21 +10266,30 @@ def test_list_issues_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_issues in client._transport._wrapped_methods + assert ( + client._transport.import_issue_model in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_issues] = mock_rpc + client._transport._wrapped_methods[ + client._transport.import_issue_model + ] = mock_rpc request = {} - client.list_issues(request) + client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_issues(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10245,7 +10297,7 @@ def test_list_issues_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_issues_empty_call_async(): +async def test_import_issue_model_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -10254,19 +10306,21 @@ async def test_list_issues_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListIssuesResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_issues() + response = await client.import_issue_model() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListIssuesRequest() + assert args[0] == contact_center_insights.ImportIssueModelRequest() @pytest.mark.asyncio -async def test_list_issues_async_use_cached_wrapped_rpc( +async def test_import_issue_model_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10283,7 +10337,7 @@ async def test_list_issues_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_issues + client._client._transport.import_issue_model in client._client._transport._wrapped_methods ) @@ -10291,16 +10345,21 @@ async def test_list_issues_async_use_cached_wrapped_rpc( mock_rpc = 
mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_issues + client._client._transport.import_issue_model ] = mock_rpc request = {} - await client.list_issues(request) + await client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_issues(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.import_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10308,9 +10367,9 @@ async def test_list_issues_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_issues_async( +async def test_import_issue_model_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.ListIssuesRequest, + request_type=contact_center_insights.ImportIssueModelRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10322,43 +10381,47 @@ async def test_list_issues_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListIssuesResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_issues(request) + response = await client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.ListIssuesRequest() + request = contact_center_insights.ImportIssueModelRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, contact_center_insights.ListIssuesResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_issues_async_from_dict(): - await test_list_issues_async(request_type=dict) +async def test_import_issue_model_async_from_dict(): + await test_import_issue_model_async(request_type=dict) -def test_list_issues_field_headers(): +def test_import_issue_model_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.ListIssuesRequest() + request = contact_center_insights.ImportIssueModelRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: - call.return_value = contact_center_insights.ListIssuesResponse() - client.list_issues(request) + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10374,23 +10437,25 @@ def test_list_issues_field_headers(): @pytest.mark.asyncio -async def test_list_issues_field_headers_async(): +async def test_import_issue_model_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = contact_center_insights.ListIssuesRequest() + request = contact_center_insights.ImportIssueModelRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListIssuesResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_issues(request) + await client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10405,18 +10470,20 @@ async def test_list_issues_field_headers_async(): ) in kw["metadata"] -def test_list_issues_flattened(): +def test_import_issue_model_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListIssuesResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_issues( + client.import_issue_model( parent="parent_value", ) @@ -10429,7 +10496,7 @@ def test_list_issues_flattened(): assert arg == mock_val -def test_list_issues_flattened_error(): +def test_import_issue_model_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10437,29 +10504,31 @@ def test_list_issues_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_issues( - contact_center_insights.ListIssuesRequest(), + client.import_issue_model( + contact_center_insights.ImportIssueModelRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_issues_flattened_async(): +async def test_import_issue_model_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListIssuesResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListIssuesResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_issues( + response = await client.import_issue_model( parent="parent_value", ) @@ -10473,7 +10542,7 @@ async def test_list_issues_flattened_async(): @pytest.mark.asyncio -async def test_list_issues_flattened_error_async(): +async def test_import_issue_model_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10481,8 +10550,8 @@ async def test_list_issues_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_issues( - contact_center_insights.ListIssuesRequest(), + await client.import_issue_model( + contact_center_insights.ImportIssueModelRequest(), parent="parent_value", ) @@ -10490,11 +10559,11 @@ async def test_list_issues_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UpdateIssueRequest, + contact_center_insights.GetIssueRequest, dict, ], ) -def test_update_issue(request_type, transport: str = "grpc"): +def test_get_issue(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10505,19 +10574,20 @@ def test_update_issue(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Issue( name="name_value", display_name="display_name_value", sample_utterances=["sample_utterances_value"], + display_description="display_description_value", ) - response = client.update_issue(request) + response = client.get_issue(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdateIssueRequest() + request = contact_center_insights.GetIssueRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -10525,9 +10595,10 @@ def test_update_issue(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.sample_utterances == ["sample_utterances_value"] + assert response.display_description == "display_description_value" -def test_update_issue_empty_call(): +def test_get_issue_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -10536,17 +10607,17 @@ def test_update_issue_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_issue() + client.get_issue() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateIssueRequest() + assert args[0] == contact_center_insights.GetIssueRequest() -def test_update_issue_non_empty_request_with_auto_populated_field(): +def test_get_issue_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = ContactCenterInsightsClient( @@ -10557,20 +10628,24 @@ def test_update_issue_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.UpdateIssueRequest() + request = contact_center_insights.GetIssueRequest( + name="name_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_issue(request=request) + client.get_issue(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateIssueRequest() + assert args[0] == contact_center_insights.GetIssueRequest( + name="name_value", + ) -def test_update_issue_use_cached_wrapped_rpc(): +def test_get_issue_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10584,21 +10659,21 @@ def test_update_issue_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_issue in client._transport._wrapped_methods + assert client._transport.get_issue in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_issue] = mock_rpc + client._transport._wrapped_methods[client._transport.get_issue] = mock_rpc request = {} - client.update_issue(request) + client.get_issue(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_issue(request) + client.get_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10606,7 +10681,7 @@ def test_update_issue_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_issue_empty_call_async(): +async def test_get_issue_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -10615,25 +10690,24 @@ async def test_update_issue_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.Issue( name="name_value", display_name="display_name_value", sample_utterances=["sample_utterances_value"], + display_description="display_description_value", ) ) - response = await client.update_issue() + response = await client.get_issue() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateIssueRequest() + assert args[0] == contact_center_insights.GetIssueRequest() @pytest.mark.asyncio -async def test_update_issue_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_get_issue_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -10648,7 +10722,7 @@ async def test_update_issue_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_issue + client._client._transport.get_issue in client._client._transport._wrapped_methods ) @@ -10656,16 +10730,16 @@ async def test_update_issue_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_issue + client._client._transport.get_issue ] = mock_rpc request = {} - await client.update_issue(request) + await client.get_issue(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.update_issue(request) + await client.get_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10673,9 +10747,9 @@ async def test_update_issue_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_issue_async( +async def test_get_issue_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.UpdateIssueRequest, + request_type=contact_center_insights.GetIssueRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10687,21 +10761,22 @@ async def test_update_issue_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.Issue( name="name_value", display_name="display_name_value", sample_utterances=["sample_utterances_value"], + display_description="display_description_value", ) ) - response = await client.update_issue(request) + response = await client.get_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdateIssueRequest() + request = contact_center_insights.GetIssueRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -10709,28 +10784,29 @@ async def test_update_issue_async( assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.sample_utterances == ["sample_utterances_value"] + assert response.display_description == "display_description_value" @pytest.mark.asyncio -async def test_update_issue_async_from_dict(): - await test_update_issue_async(request_type=dict) +async def test_get_issue_async_from_dict(): + await test_get_issue_async(request_type=dict) -def test_update_issue_field_headers(): +def test_get_issue_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdateIssueRequest() + request = contact_center_insights.GetIssueRequest() - request.issue.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: call.return_value = resources.Issue() - client.update_issue(request) + client.get_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10741,26 +10817,26 @@ def test_update_issue_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "issue.name=name_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_issue_field_headers_async(): +async def test_get_issue_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = contact_center_insights.UpdateIssueRequest() + request = contact_center_insights.GetIssueRequest() - request.issue.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue()) - await client.update_issue(request) + await client.get_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10771,39 +10847,35 @@ async def test_update_issue_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "issue.name=name_value", + "name=name_value", ) in kw["metadata"] -def test_update_issue_flattened(): +def test_get_issue_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Issue() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_issue( - issue=resources.Issue(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_issue( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].issue - mock_val = resources.Issue(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_update_issue_flattened_error(): +def test_get_issue_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10811,46 +10883,41 @@ def test_update_issue_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_issue( - contact_center_insights.UpdateIssueRequest(), - issue=resources.Issue(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_issue( + contact_center_insights.GetIssueRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_update_issue_flattened_async(): +async def test_get_issue_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Issue() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_issue( - issue=resources.Issue(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.get_issue( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].issue - mock_val = resources.Issue(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_issue_flattened_error_async(): +async def test_get_issue_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10858,21 +10925,20 @@ async def test_update_issue_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_issue( - contact_center_insights.UpdateIssueRequest(), - issue=resources.Issue(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.get_issue( + contact_center_insights.GetIssueRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeleteIssueRequest, + contact_center_insights.ListIssuesRequest, dict, ], ) -def test_delete_issue(request_type, transport: str = "grpc"): +def test_list_issues(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10883,22 +10949,22 @@ def test_delete_issue(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.delete_issue(request) + call.return_value = contact_center_insights.ListIssuesResponse() + response = client.list_issues(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.DeleteIssueRequest() + request = contact_center_insights.ListIssuesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, contact_center_insights.ListIssuesResponse) -def test_delete_issue_empty_call(): +def test_list_issues_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -10907,17 +10973,17 @@ def test_delete_issue_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_issue() + client.list_issues() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeleteIssueRequest() + assert args[0] == contact_center_insights.ListIssuesRequest() -def test_delete_issue_non_empty_request_with_auto_populated_field(): +def test_list_issues_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = ContactCenterInsightsClient( @@ -10928,24 +10994,24 @@ def test_delete_issue_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.DeleteIssueRequest( - name="name_value", + request = contact_center_insights.ListIssuesRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_issue(request=request) + client.list_issues(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeleteIssueRequest( - name="name_value", + assert args[0] == contact_center_insights.ListIssuesRequest( + parent="parent_value", ) -def test_delete_issue_use_cached_wrapped_rpc(): +def test_list_issues_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10959,21 +11025,21 @@ def test_delete_issue_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_issue in client._transport._wrapped_methods + assert client._transport.list_issues in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.delete_issue] = mock_rpc + client._transport._wrapped_methods[client._transport.list_issues] = mock_rpc request = {} - client.delete_issue(request) + client.list_issues(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_issue(request) + client.list_issues(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10981,7 +11047,7 @@ def test_delete_issue_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_issue_empty_call_async(): +async def test_list_issues_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -10990,17 +11056,19 @@ async def test_delete_issue_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_issue() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListIssuesResponse() + ) + response = await client.list_issues() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeleteIssueRequest() + assert args[0] == contact_center_insights.ListIssuesRequest() @pytest.mark.asyncio -async def test_delete_issue_async_use_cached_wrapped_rpc( +async def test_list_issues_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11017,7 +11085,7 @@ async def test_delete_issue_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_issue + client._client._transport.list_issues in client._client._transport._wrapped_methods ) @@ -11025,16 +11093,16 @@ async def test_delete_issue_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_issue + client._client._transport.list_issues ] = mock_rpc request = {} - await client.delete_issue(request) + await client.list_issues(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.delete_issue(request) + await client.list_issues(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11042,9 +11110,9 @@ async def test_delete_issue_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_issue_async( +async def test_list_issues_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.DeleteIssueRequest, + request_type=contact_center_insights.ListIssuesRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11056,41 +11124,43 @@ async def test_delete_issue_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_issue(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListIssuesResponse() + ) + response = await client.list_issues(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.DeleteIssueRequest() + request = contact_center_insights.ListIssuesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, contact_center_insights.ListIssuesResponse) @pytest.mark.asyncio -async def test_delete_issue_async_from_dict(): - await test_delete_issue_async(request_type=dict) +async def test_list_issues_async_from_dict(): + await test_list_issues_async(request_type=dict) -def test_delete_issue_field_headers(): +def test_list_issues_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.DeleteIssueRequest() + request = contact_center_insights.ListIssuesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: - call.return_value = None - client.delete_issue(request) + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + call.return_value = contact_center_insights.ListIssuesResponse() + client.list_issues(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11101,26 +11171,28 @@ def test_delete_issue_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_issue_field_headers_async(): +async def test_list_issues_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = contact_center_insights.DeleteIssueRequest() + request = contact_center_insights.ListIssuesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_issue(request) + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListIssuesResponse() + ) + await client.list_issues(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11131,35 +11203,35 @@ async def test_delete_issue_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_delete_issue_flattened(): +def test_list_issues_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = contact_center_insights.ListIssuesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_issue( - name="name_value", + client.list_issues( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_delete_issue_flattened_error(): +def test_list_issues_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11167,41 +11239,43 @@ def test_delete_issue_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_issue( - contact_center_insights.DeleteIssueRequest(), - name="name_value", + client.list_issues( + contact_center_insights.ListIssuesRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_delete_issue_flattened_async(): +async def test_list_issues_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = contact_center_insights.ListIssuesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListIssuesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_issue( - name="name_value", + response = await client.list_issues( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_delete_issue_flattened_error_async(): +async def test_list_issues_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11209,20 +11283,20 @@ async def test_delete_issue_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_issue( - contact_center_insights.DeleteIssueRequest(), - name="name_value", + await client.list_issues( + contact_center_insights.ListIssuesRequest(), + parent="parent_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CalculateIssueModelStatsRequest, + contact_center_insights.UpdateIssueRequest, dict, ], ) -def test_calculate_issue_model_stats(request_type, transport: str = "grpc"): +def test_update_issue(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11233,26 +11307,31 @@ def test_calculate_issue_model_stats(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() - response = client.calculate_issue_model_stats(request) + call.return_value = resources.Issue( + name="name_value", + display_name="display_name_value", + sample_utterances=["sample_utterances_value"], + display_description="display_description_value", + ) + response = client.update_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.CalculateIssueModelStatsRequest() + request = contact_center_insights.UpdateIssueRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance( - response, contact_center_insights.CalculateIssueModelStatsResponse - ) + assert isinstance(response, resources.Issue) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.sample_utterances == ["sample_utterances_value"] + assert response.display_description == "display_description_value" -def test_calculate_issue_model_stats_empty_call(): +def test_update_issue_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -11261,19 +11340,17 @@ def test_calculate_issue_model_stats_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.calculate_issue_model_stats() + client.update_issue() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest() + assert args[0] == contact_center_insights.UpdateIssueRequest() -def test_calculate_issue_model_stats_non_empty_request_with_auto_populated_field(): +def test_update_issue_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -11284,26 +11361,20 @@ def test_calculate_issue_model_stats_non_empty_request_with_auto_populated_field # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.CalculateIssueModelStatsRequest( - issue_model="issue_model_value", - ) + request = contact_center_insights.UpdateIssueRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.calculate_issue_model_stats(request=request) + client.update_issue(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest( - issue_model="issue_model_value", - ) + assert args[0] == contact_center_insights.UpdateIssueRequest() -def test_calculate_issue_model_stats_use_cached_wrapped_rpc(): +def test_update_issue_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11317,26 +11388,21 @@ def test_calculate_issue_model_stats_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.calculate_issue_model_stats - in client._transport._wrapped_methods - ) + assert client._transport.update_issue in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.calculate_issue_model_stats - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_issue] = mock_rpc request = {} - client.calculate_issue_model_stats(request) + client.update_issue(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.calculate_issue_model_stats(request) + client.update_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11344,7 +11410,7 @@ def test_calculate_issue_model_stats_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_calculate_issue_model_stats_empty_call_async(): +async def test_update_issue_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -11353,21 +11419,24 @@ async def test_calculate_issue_model_stats_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateIssueModelStatsResponse() + resources.Issue( + name="name_value", + display_name="display_name_value", + sample_utterances=["sample_utterances_value"], + display_description="display_description_value", + ) ) - response = await client.calculate_issue_model_stats() + response = await client.update_issue() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest() + assert args[0] == contact_center_insights.UpdateIssueRequest() @pytest.mark.asyncio -async def test_calculate_issue_model_stats_async_use_cached_wrapped_rpc( +async def test_update_issue_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11384,7 +11453,7 @@ async def test_calculate_issue_model_stats_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.calculate_issue_model_stats + client._client._transport.update_issue in client._client._transport._wrapped_methods ) @@ -11392,16 +11461,16 @@ async def test_calculate_issue_model_stats_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.calculate_issue_model_stats + client._client._transport.update_issue ] = mock_rpc request = {} - await 
client.calculate_issue_model_stats(request) + await client.update_issue(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.calculate_issue_model_stats(request) + await client.update_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11409,9 +11478,9 @@ async def test_calculate_issue_model_stats_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_calculate_issue_model_stats_async( +async def test_update_issue_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.CalculateIssueModelStatsRequest, + request_type=contact_center_insights.UpdateIssueRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11423,49 +11492,52 @@ async def test_calculate_issue_model_stats_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateIssueModelStatsResponse() + resources.Issue( + name="name_value", + display_name="display_name_value", + sample_utterances=["sample_utterances_value"], + display_description="display_description_value", + ) ) - response = await client.calculate_issue_model_stats(request) + response = await client.update_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.CalculateIssueModelStatsRequest() + request = contact_center_insights.UpdateIssueRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance( - response, contact_center_insights.CalculateIssueModelStatsResponse - ) + assert isinstance(response, resources.Issue) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.sample_utterances == ["sample_utterances_value"] + assert response.display_description == "display_description_value" @pytest.mark.asyncio -async def test_calculate_issue_model_stats_async_from_dict(): - await test_calculate_issue_model_stats_async(request_type=dict) +async def test_update_issue_async_from_dict(): + await test_update_issue_async(request_type=dict) -def test_calculate_issue_model_stats_field_headers(): +def test_update_issue_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CalculateIssueModelStatsRequest() + request = contact_center_insights.UpdateIssueRequest() - request.issue_model = "issue_model_value" + request.issue.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: - call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() - client.calculate_issue_model_stats(request) + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + call.return_value = resources.Issue() + client.update_issue(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -11476,30 +11548,26 @@ def test_calculate_issue_model_stats_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "issue_model=issue_model_value", + "issue.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_calculate_issue_model_stats_field_headers_async(): +async def test_update_issue_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CalculateIssueModelStatsRequest() + request = contact_center_insights.UpdateIssueRequest() - request.issue_model = "issue_model_value" + request.issue.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateIssueModelStatsResponse() - ) - await client.calculate_issue_model_stats(request) + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue()) + await client.update_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11510,37 +11578,39 @@ async def test_calculate_issue_model_stats_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "issue_model=issue_model_value", + "issue.name=name_value", ) in kw["metadata"] -def test_calculate_issue_model_stats_flattened(): +def test_update_issue_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() + call.return_value = resources.Issue() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.calculate_issue_model_stats( - issue_model="issue_model_value", + client.update_issue( + issue=resources.Issue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].issue_model - mock_val = "issue_model_value" + arg = args[0].issue + mock_val = resources.Issue(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_calculate_issue_model_stats_flattened_error(): +def test_update_issue_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11548,45 +11618,46 @@ def test_calculate_issue_model_stats_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.calculate_issue_model_stats( - contact_center_insights.CalculateIssueModelStatsRequest(), - issue_model="issue_model_value", + client.update_issue( + contact_center_insights.UpdateIssueRequest(), + issue=resources.Issue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_calculate_issue_model_stats_flattened_async(): +async def test_update_issue_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() + call.return_value = resources.Issue() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateIssueModelStatsResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.calculate_issue_model_stats( - issue_model="issue_model_value", + response = await client.update_issue( + issue=resources.Issue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].issue_model - mock_val = "issue_model_value" + arg = args[0].issue + mock_val = resources.Issue(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_calculate_issue_model_stats_flattened_error_async(): +async def test_update_issue_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11594,20 +11665,21 @@ async def test_calculate_issue_model_stats_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.calculate_issue_model_stats( - contact_center_insights.CalculateIssueModelStatsRequest(), - issue_model="issue_model_value", - ) + await client.update_issue( + contact_center_insights.UpdateIssueRequest(), + issue=resources.Issue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CreatePhraseMatcherRequest, + contact_center_insights.DeleteIssueRequest, dict, ], ) -def test_create_phrase_matcher(request_type, transport: str = "grpc"): +def test_delete_issue(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11618,39 +11690,22 @@ def test_create_phrase_matcher(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) - response = client.create_phrase_matcher(request) + call.return_value = None + response = client.delete_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.CreatePhraseMatcherRequest() + request = contact_center_insights.DeleteIssueRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert response is None -def test_create_phrase_matcher_empty_call(): +def test_delete_issue_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -11659,19 +11714,17 @@ def test_create_phrase_matcher_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_phrase_matcher() + client.delete_issue() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CreatePhraseMatcherRequest() + assert args[0] == contact_center_insights.DeleteIssueRequest() -def test_create_phrase_matcher_non_empty_request_with_auto_populated_field(): +def test_delete_issue_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -11682,26 +11735,24 @@ def test_create_phrase_matcher_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.CreatePhraseMatcherRequest( - parent="parent_value", + request = contact_center_insights.DeleteIssueRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_phrase_matcher(request=request) + client.delete_issue(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CreatePhraseMatcherRequest( - parent="parent_value", + assert args[0] == contact_center_insights.DeleteIssueRequest( + name="name_value", ) -def test_create_phrase_matcher_use_cached_wrapped_rpc(): +def test_delete_issue_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11715,26 +11766,21 @@ def test_create_phrase_matcher_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_phrase_matcher - in client._transport._wrapped_methods - ) + assert client._transport.delete_issue in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_phrase_matcher - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_issue] = mock_rpc request = {} - client.create_phrase_matcher(request) + client.delete_issue(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_phrase_matcher(request) + client.delete_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11742,7 +11788,7 @@ def test_create_phrase_matcher_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_phrase_matcher_empty_call_async(): +async def test_delete_issue_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ContactCenterInsightsAsyncClient( @@ -11751,29 +11797,17 @@ async def test_create_phrase_matcher_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) - ) - response = await client.create_phrase_matcher() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_issue() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CreatePhraseMatcherRequest() + assert args[0] == contact_center_insights.DeleteIssueRequest() @pytest.mark.asyncio -async def test_create_phrase_matcher_async_use_cached_wrapped_rpc( +async def test_delete_issue_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11790,7 +11824,7 @@ async def test_create_phrase_matcher_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_phrase_matcher + client._client._transport.delete_issue in client._client._transport._wrapped_methods ) @@ -11798,16 +11832,16 @@ async def test_create_phrase_matcher_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_phrase_matcher + client._client._transport.delete_issue ] = mock_rpc request = {} - await 
client.create_phrase_matcher(request) + await client.delete_issue(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.create_phrase_matcher(request) + await client.delete_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11815,9 +11849,9 @@ async def test_create_phrase_matcher_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_phrase_matcher_async( +async def test_delete_issue_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.CreatePhraseMatcherRequest, + request_type=contact_center_insights.DeleteIssueRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11829,62 +11863,41 @@ async def test_create_phrase_matcher_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) - ) - response = await client.create_phrase_matcher(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_issue(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.CreatePhraseMatcherRequest() + request = contact_center_insights.DeleteIssueRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert response is None @pytest.mark.asyncio -async def test_create_phrase_matcher_async_from_dict(): - await test_create_phrase_matcher_async(request_type=dict) +async def test_delete_issue_async_from_dict(): + await test_delete_issue_async(request_type=dict) -def test_create_phrase_matcher_field_headers(): +def test_delete_issue_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CreatePhraseMatcherRequest() + request = contact_center_insights.DeleteIssueRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: - call.return_value = resources.PhraseMatcher() - client.create_phrase_matcher(request) + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + call.return_value = None + client.delete_issue(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -11895,30 +11908,26 @@ def test_create_phrase_matcher_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_phrase_matcher_field_headers_async(): +async def test_delete_issue_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CreatePhraseMatcherRequest() + request = contact_center_insights.DeleteIssueRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher() - ) - await client.create_phrase_matcher(request) + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11929,41 +11938,35 @@ async def test_create_phrase_matcher_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_phrase_matcher_flattened(): +def test_delete_issue_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_phrase_matcher( - parent="parent_value", - phrase_matcher=resources.PhraseMatcher(name="name_value"), + client.delete_issue( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].phrase_matcher - mock_val = resources.PhraseMatcher(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_phrase_matcher_flattened_error(): +def test_delete_issue_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11971,50 +11974,41 @@ def test_create_phrase_matcher_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_phrase_matcher( - contact_center_insights.CreatePhraseMatcherRequest(), - parent="parent_value", - phrase_matcher=resources.PhraseMatcher(name="name_value"), + client.delete_issue( + contact_center_insights.DeleteIssueRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_phrase_matcher_flattened_async(): +async def test_delete_issue_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_phrase_matcher( - parent="parent_value", - phrase_matcher=resources.PhraseMatcher(name="name_value"), + response = await client.delete_issue( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].phrase_matcher - mock_val = resources.PhraseMatcher(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_phrase_matcher_flattened_error_async(): +async def test_delete_issue_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12022,21 +12016,20 @@ async def test_create_phrase_matcher_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_phrase_matcher( - contact_center_insights.CreatePhraseMatcherRequest(), - parent="parent_value", - phrase_matcher=resources.PhraseMatcher(name="name_value"), + await client.delete_issue( + contact_center_insights.DeleteIssueRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetPhraseMatcherRequest, + contact_center_insights.CalculateIssueModelStatsRequest, dict, ], ) -def test_get_phrase_matcher(request_type, transport: str = "grpc"): +def test_calculate_issue_model_stats(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12048,38 +12041,25 @@ def test_get_phrase_matcher(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) - response = client.get_phrase_matcher(request) + call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() + response = client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetPhraseMatcherRequest() + request = contact_center_insights.CalculateIssueModelStatsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert isinstance( + response, contact_center_insights.CalculateIssueModelStatsResponse + ) -def test_get_phrase_matcher_empty_call(): +def test_calculate_issue_model_stats_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -12089,18 +12069,18 @@ def test_get_phrase_matcher_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_phrase_matcher() + client.calculate_issue_model_stats() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetPhraseMatcherRequest() + assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest() -def test_get_phrase_matcher_non_empty_request_with_auto_populated_field(): +def test_calculate_issue_model_stats_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = ContactCenterInsightsClient( @@ -12111,26 +12091,26 @@ def test_get_phrase_matcher_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.GetPhraseMatcherRequest( - name="name_value", + request = contact_center_insights.CalculateIssueModelStatsRequest( + issue_model="issue_model_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_phrase_matcher(request=request) + client.calculate_issue_model_stats(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetPhraseMatcherRequest( - name="name_value", + assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest( + issue_model="issue_model_value", ) -def test_get_phrase_matcher_use_cached_wrapped_rpc(): +def test_calculate_issue_model_stats_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12145,7 +12125,8 @@ def test_get_phrase_matcher_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_phrase_matcher in client._transport._wrapped_methods + client._transport.calculate_issue_model_stats + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -12154,15 +12135,15 @@ def test_get_phrase_matcher_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.get_phrase_matcher + client._transport.calculate_issue_model_stats ] = mock_rpc request = {} - client.get_phrase_matcher(request) + client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_phrase_matcher(request) + client.calculate_issue_model_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12170,7 +12151,7 @@ def test_get_phrase_matcher_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_phrase_matcher_empty_call_async(): +async def test_calculate_issue_model_stats_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -12180,28 +12161,20 @@ async def test_get_phrase_matcher_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) + contact_center_insights.CalculateIssueModelStatsResponse() ) - response = await client.get_phrase_matcher() + response = await client.calculate_issue_model_stats() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetPhraseMatcherRequest() + assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest() @pytest.mark.asyncio -async def test_get_phrase_matcher_async_use_cached_wrapped_rpc( +async def test_calculate_issue_model_stats_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -12218,7 +12191,7 @@ async def test_get_phrase_matcher_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_phrase_matcher + client._client._transport.calculate_issue_model_stats in client._client._transport._wrapped_methods ) @@ -12226,16 +12199,16 @@ async def test_get_phrase_matcher_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_phrase_matcher + client._client._transport.calculate_issue_model_stats ] = mock_rpc request = {} - await client.get_phrase_matcher(request) + await client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_phrase_matcher(request) + await client.calculate_issue_model_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12243,9 +12216,9 @@ async def test_get_phrase_matcher_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_phrase_matcher_async( +async def test_calculate_issue_model_stats_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.GetPhraseMatcherRequest, + request_type=contact_center_insights.CalculateIssueModelStatsRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12258,61 +12231,48 @@ async def test_get_phrase_matcher_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) + contact_center_insights.CalculateIssueModelStatsResponse() ) - response = await client.get_phrase_matcher(request) + response = await client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetPhraseMatcherRequest() + request = contact_center_insights.CalculateIssueModelStatsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert isinstance( + response, contact_center_insights.CalculateIssueModelStatsResponse + ) @pytest.mark.asyncio -async def test_get_phrase_matcher_async_from_dict(): - await test_get_phrase_matcher_async(request_type=dict) +async def test_calculate_issue_model_stats_async_from_dict(): + await test_calculate_issue_model_stats_async(request_type=dict) -def test_get_phrase_matcher_field_headers(): +def test_calculate_issue_model_stats_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetPhraseMatcherRequest() + request = contact_center_insights.CalculateIssueModelStatsRequest() - request.name = "name_value" + request.issue_model = "issue_model_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: - call.return_value = resources.PhraseMatcher() - client.get_phrase_matcher(request) + call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() + client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -12323,30 +12283,30 @@ def test_get_phrase_matcher_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "issue_model=issue_model_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_phrase_matcher_field_headers_async(): +async def test_calculate_issue_model_stats_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetPhraseMatcherRequest() + request = contact_center_insights.CalculateIssueModelStatsRequest() - request.name = "name_value" + request.issue_model = "issue_model_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher() + contact_center_insights.CalculateIssueModelStatsResponse() ) - await client.get_phrase_matcher(request) + await client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12357,37 +12317,37 @@ async def test_get_phrase_matcher_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "issue_model=issue_model_value", ) in kw["metadata"] -def test_get_phrase_matcher_flattened(): +def test_calculate_issue_model_stats_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher() + call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_phrase_matcher( - name="name_value", + client.calculate_issue_model_stats( + issue_model="issue_model_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].issue_model + mock_val = "issue_model_value" assert arg == mock_val -def test_get_phrase_matcher_flattened_error(): +def test_calculate_issue_model_stats_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12395,45 +12355,45 @@ def test_get_phrase_matcher_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_phrase_matcher( - contact_center_insights.GetPhraseMatcherRequest(), - name="name_value", + client.calculate_issue_model_stats( + contact_center_insights.CalculateIssueModelStatsRequest(), + issue_model="issue_model_value", ) @pytest.mark.asyncio -async def test_get_phrase_matcher_flattened_async(): +async def test_calculate_issue_model_stats_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher() + call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher() + contact_center_insights.CalculateIssueModelStatsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_phrase_matcher( - name="name_value", + response = await client.calculate_issue_model_stats( + issue_model="issue_model_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].issue_model + mock_val = "issue_model_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_phrase_matcher_flattened_error_async(): +async def test_calculate_issue_model_stats_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12441,20 +12401,20 @@ async def test_get_phrase_matcher_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_phrase_matcher( - contact_center_insights.GetPhraseMatcherRequest(), - name="name_value", + await client.calculate_issue_model_stats( + contact_center_insights.CalculateIssueModelStatsRequest(), + issue_model="issue_model_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListPhraseMatchersRequest, + contact_center_insights.CreatePhraseMatcherRequest, dict, ], ) -def test_list_phrase_matchers(request_type, transport: str = "grpc"): +def test_create_phrase_matcher(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12466,26 +12426,38 @@ def test_list_phrase_matchers(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListPhraseMatchersResponse( - next_page_token="next_page_token_value", + call.return_value = resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) - response = client.list_phrase_matchers(request) + response = client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.ListPhraseMatchersRequest() + request = contact_center_insights.CreatePhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListPhraseMatchersPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT -def test_list_phrase_matchers_empty_call(): +def test_create_phrase_matcher_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -12495,18 +12467,18 @@ def test_list_phrase_matchers_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_phrase_matchers() + client.create_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListPhraseMatchersRequest() + assert args[0] == contact_center_insights.CreatePhraseMatcherRequest() -def test_list_phrase_matchers_non_empty_request_with_auto_populated_field(): +def test_create_phrase_matcher_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = ContactCenterInsightsClient( @@ -12517,30 +12489,26 @@ def test_list_phrase_matchers_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.ListPhraseMatchersRequest( + request = contact_center_insights.CreatePhraseMatcherRequest( parent="parent_value", - page_token="page_token_value", - filter="filter_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_phrase_matchers(request=request) + client.create_phrase_matcher(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListPhraseMatchersRequest( + assert args[0] == contact_center_insights.CreatePhraseMatcherRequest( parent="parent_value", - page_token="page_token_value", - filter="filter_value", ) -def test_list_phrase_matchers_use_cached_wrapped_rpc(): +def test_create_phrase_matcher_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12555,7 +12523,8 @@ def test_list_phrase_matchers_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_phrase_matchers in client._transport._wrapped_methods + client._transport.create_phrase_matcher + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -12564,15 +12533,15 @@ def test_list_phrase_matchers_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) 
expect a string. ) client._transport._wrapped_methods[ - client._transport.list_phrase_matchers + client._transport.create_phrase_matcher ] = mock_rpc request = {} - client.list_phrase_matchers(request) + client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_phrase_matchers(request) + client.create_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12580,7 +12549,7 @@ def test_list_phrase_matchers_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_phrase_matchers_empty_call_async(): +async def test_create_phrase_matcher_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -12590,22 +12559,28 @@ async def test_list_phrase_matchers_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListPhraseMatchersResponse( - next_page_token="next_page_token_value", + resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) ) - response = await client.list_phrase_matchers() + response = await client.create_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListPhraseMatchersRequest() + assert args[0] == contact_center_insights.CreatePhraseMatcherRequest() @pytest.mark.asyncio -async def test_list_phrase_matchers_async_use_cached_wrapped_rpc( +async def test_create_phrase_matcher_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -12622,7 +12597,7 @@ async def test_list_phrase_matchers_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_phrase_matchers + client._client._transport.create_phrase_matcher in client._client._transport._wrapped_methods ) @@ -12630,16 +12605,16 @@ async def test_list_phrase_matchers_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_phrase_matchers + client._client._transport.create_phrase_matcher ] = mock_rpc request = {} - await client.list_phrase_matchers(request) + await client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.list_phrase_matchers(request) + await client.create_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12647,9 +12622,9 @@ async def test_list_phrase_matchers_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_phrase_matchers_async( +async def test_create_phrase_matcher_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.ListPhraseMatchersRequest, + request_type=contact_center_insights.CreatePhraseMatcherRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12662,49 +12637,61 @@ async def test_list_phrase_matchers_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListPhraseMatchersResponse( - next_page_token="next_page_token_value", + resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) ) - response = await client.list_phrase_matchers(request) + response = await client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.ListPhraseMatchersRequest() + request = contact_center_insights.CreatePhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListPhraseMatchersAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT @pytest.mark.asyncio -async def test_list_phrase_matchers_async_from_dict(): - await test_list_phrase_matchers_async(request_type=dict) +async def test_create_phrase_matcher_async_from_dict(): + await test_create_phrase_matcher_async(request_type=dict) -def test_list_phrase_matchers_field_headers(): +def test_create_phrase_matcher_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.ListPhraseMatchersRequest() + request = contact_center_insights.CreatePhraseMatcherRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: - call.return_value = contact_center_insights.ListPhraseMatchersResponse() - client.list_phrase_matchers(request) + call.return_value = resources.PhraseMatcher() + client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -12720,25 +12707,25 @@ def test_list_phrase_matchers_field_headers(): @pytest.mark.asyncio -async def test_list_phrase_matchers_field_headers_async(): +async def test_create_phrase_matcher_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.ListPhraseMatchersRequest() + request = contact_center_insights.CreatePhraseMatcherRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListPhraseMatchersResponse() + resources.PhraseMatcher() ) - await client.list_phrase_matchers(request) + await client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12753,21 +12740,22 @@ async def test_list_phrase_matchers_field_headers_async(): ) in kw["metadata"] -def test_list_phrase_matchers_flattened(): +def test_create_phrase_matcher_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListPhraseMatchersResponse() + call.return_value = resources.PhraseMatcher() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_phrase_matchers( + client.create_phrase_matcher( parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -12777,9 +12765,12 @@ def test_list_phrase_matchers_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val + arg = args[0].phrase_matcher + mock_val = resources.PhraseMatcher(name="name_value") + assert arg == mock_val -def test_list_phrase_matchers_flattened_error(): +def test_create_phrase_matcher_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12787,32 +12778,34 @@ def test_list_phrase_matchers_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_phrase_matchers( - contact_center_insights.ListPhraseMatchersRequest(), + client.create_phrase_matcher( + contact_center_insights.CreatePhraseMatcherRequest(), parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), ) @pytest.mark.asyncio -async def test_list_phrase_matchers_flattened_async(): +async def test_create_phrase_matcher_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListPhraseMatchersResponse() + call.return_value = resources.PhraseMatcher() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListPhraseMatchersResponse() + resources.PhraseMatcher() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_phrase_matchers( + response = await client.create_phrase_matcher( parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -12822,10 +12815,13 @@ async def test_list_phrase_matchers_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val + arg = args[0].phrase_matcher + mock_val = resources.PhraseMatcher(name="name_value") + assert arg == mock_val @pytest.mark.asyncio -async def test_list_phrase_matchers_flattened_error_async(): +async def test_create_phrase_matcher_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12833,301 +12829,115 @@ async def test_list_phrase_matchers_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_phrase_matchers( - contact_center_insights.ListPhraseMatchersRequest(), + await client.create_phrase_matcher( + contact_center_insights.CreatePhraseMatcherRequest(), parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), ) -def test_list_phrase_matchers_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.GetPhraseMatcherRequest, + dict, + ], +) +def test_get_phrase_matcher(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - next_page_token="abc", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[], - next_page_token="def", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. + call.return_value = resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) - pager = client.list_phrase_matchers(request={}, retry=retry, timeout=timeout) + response = client.get_phrase_matcher(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = contact_center_insights.GetPhraseMatcherRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.PhraseMatcher) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT -def test_list_phrase_matchers_pages(transport_name: str = "grpc"): +def test_get_phrase_matcher_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - next_page_token="abc", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[], - next_page_token="def", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - pages = list(client.list_phrase_matchers(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.get_phrase_matcher() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.GetPhraseMatcherRequest() -@pytest.mark.asyncio -async def test_list_phrase_matchers_async_pager(): - client = ContactCenterInsightsAsyncClient( +def test_get_phrase_matcher_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_phrase_matchers), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - next_page_token="abc", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[], - next_page_token="def", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_phrase_matchers( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.PhraseMatcher) for i in responses) - - -@pytest.mark.asyncio -async def test_list_phrase_matchers_async_pages(): - client = ContactCenterInsightsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_phrase_matchers), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - next_page_token="abc", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[], - next_page_token="def", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_phrase_matchers(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.DeletePhraseMatcherRequest, - dict, - ], -) -def test_delete_phrase_matcher(request_type, transport: str = "grpc"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_phrase_matcher(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = contact_center_insights.DeletePhraseMatcherRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_phrase_matcher_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_phrase_matcher() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeletePhraseMatcherRequest() - - -def test_delete_phrase_matcher_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.DeletePhraseMatcherRequest( + request = contact_center_insights.GetPhraseMatcherRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_phrase_matcher(request=request) + client.get_phrase_matcher(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeletePhraseMatcherRequest( + assert args[0] == contact_center_insights.GetPhraseMatcherRequest( name="name_value", ) -def test_delete_phrase_matcher_use_cached_wrapped_rpc(): +def test_get_phrase_matcher_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13142,8 +12952,7 @@ def test_delete_phrase_matcher_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_phrase_matcher - in client._transport._wrapped_methods + client._transport.get_phrase_matcher in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -13152,15 +12961,15 @@ def test_delete_phrase_matcher_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_phrase_matcher + client._transport.get_phrase_matcher ] = mock_rpc request = {} - client.delete_phrase_matcher(request) + client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_phrase_matcher(request) + client.get_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13168,7 +12977,7 @@ def test_delete_phrase_matcher_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_phrase_matcher_empty_call_async(): +async def test_get_phrase_matcher_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -13178,18 +12987,28 @@ async def test_delete_phrase_matcher_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_phrase_matcher() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + ) + ) + response = await client.get_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeletePhraseMatcherRequest() + assert args[0] == contact_center_insights.GetPhraseMatcherRequest() @pytest.mark.asyncio -async def test_delete_phrase_matcher_async_use_cached_wrapped_rpc( +async def test_get_phrase_matcher_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -13206,7 +13025,7 @@ async def 
test_delete_phrase_matcher_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_phrase_matcher + client._client._transport.get_phrase_matcher in client._client._transport._wrapped_methods ) @@ -13214,16 +13033,16 @@ async def test_delete_phrase_matcher_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_phrase_matcher + client._client._transport.get_phrase_matcher ] = mock_rpc request = {} - await client.delete_phrase_matcher(request) + await client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.delete_phrase_matcher(request) + await client.get_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13231,9 +13050,9 @@ async def test_delete_phrase_matcher_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_phrase_matcher_async( +async def test_get_phrase_matcher_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.DeletePhraseMatcherRequest, + request_type=contact_center_insights.GetPhraseMatcherRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13246,44 +13065,61 @@ async def test_delete_phrase_matcher_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_phrase_matcher(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + ) + ) + response = await client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.DeletePhraseMatcherRequest() + request = contact_center_insights.GetPhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT @pytest.mark.asyncio -async def test_delete_phrase_matcher_async_from_dict(): - await test_delete_phrase_matcher_async(request_type=dict) +async def test_get_phrase_matcher_async_from_dict(): + await test_get_phrase_matcher_async(request_type=dict) -def test_delete_phrase_matcher_field_headers(): +def test_get_phrase_matcher_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = contact_center_insights.DeletePhraseMatcherRequest() + request = contact_center_insights.GetPhraseMatcherRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: - call.return_value = None - client.delete_phrase_matcher(request) + call.return_value = resources.PhraseMatcher() + client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13299,23 +13135,25 @@ def test_delete_phrase_matcher_field_headers(): @pytest.mark.asyncio -async def test_delete_phrase_matcher_field_headers_async(): +async def test_get_phrase_matcher_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.DeletePhraseMatcherRequest() + request = contact_center_insights.GetPhraseMatcherRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_phrase_matcher(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.PhraseMatcher() + ) + await client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -13330,20 +13168,20 @@ async def test_delete_phrase_matcher_field_headers_async(): ) in kw["metadata"] -def test_delete_phrase_matcher_flattened(): +def test_get_phrase_matcher_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = resources.PhraseMatcher() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_phrase_matcher( + client.get_phrase_matcher( name="name_value", ) @@ -13356,7 +13194,7 @@ def test_delete_phrase_matcher_flattened(): assert arg == mock_val -def test_delete_phrase_matcher_flattened_error(): +def test_get_phrase_matcher_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13364,29 +13202,31 @@ def test_delete_phrase_matcher_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_phrase_matcher( - contact_center_insights.DeletePhraseMatcherRequest(), + client.get_phrase_matcher( + contact_center_insights.GetPhraseMatcherRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_phrase_matcher_flattened_async(): +async def test_get_phrase_matcher_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = resources.PhraseMatcher() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.PhraseMatcher() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_phrase_matcher( + response = await client.get_phrase_matcher( name="name_value", ) @@ -13400,7 +13240,7 @@ async def test_delete_phrase_matcher_flattened_async(): @pytest.mark.asyncio -async def test_delete_phrase_matcher_flattened_error_async(): +async def test_get_phrase_matcher_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13408,8 +13248,8 @@ async def test_delete_phrase_matcher_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_phrase_matcher( - contact_center_insights.DeletePhraseMatcherRequest(), + await client.get_phrase_matcher( + contact_center_insights.GetPhraseMatcherRequest(), name="name_value", ) @@ -13417,11 +13257,11 @@ async def test_delete_phrase_matcher_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UpdatePhraseMatcherRequest, + contact_center_insights.ListPhraseMatchersRequest, dict, ], ) -def test_update_phrase_matcher(request_type, transport: str = "grpc"): +def test_list_phrase_matchers(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13433,38 +13273,26 @@ def test_update_phrase_matcher(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + call.return_value = contact_center_insights.ListPhraseMatchersResponse( + next_page_token="next_page_token_value", ) - response = client.update_phrase_matcher(request) + response = client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdatePhraseMatcherRequest() + request = contact_center_insights.ListPhraseMatchersRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert isinstance(response, pagers.ListPhraseMatchersPager) + assert response.next_page_token == "next_page_token_value" -def test_update_phrase_matcher_empty_call(): +def test_list_phrase_matchers_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -13474,18 +13302,18 @@ def test_update_phrase_matcher_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_phrase_matcher() + client.list_phrase_matchers() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest() + assert args[0] == contact_center_insights.ListPhraseMatchersRequest() -def test_update_phrase_matcher_non_empty_request_with_auto_populated_field(): +def test_list_phrase_matchers_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = ContactCenterInsightsClient( @@ -13496,22 +13324,30 @@ def test_update_phrase_matcher_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.UpdatePhraseMatcherRequest() + request = contact_center_insights.ListPhraseMatchersRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_phrase_matcher(request=request) + client.list_phrase_matchers(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest() + assert args[0] == contact_center_insights.ListPhraseMatchersRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) -def test_update_phrase_matcher_use_cached_wrapped_rpc(): +def test_list_phrase_matchers_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13526,8 +13362,7 @@ def test_update_phrase_matcher_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_phrase_matcher - in client._transport._wrapped_methods + client._transport.list_phrase_matchers in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -13536,15 +13371,15 @@ def test_update_phrase_matcher_use_cached_wrapped_rpc(): "foo" # operation_request.operation in 
compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_phrase_matcher + client._transport.list_phrase_matchers ] = mock_rpc request = {} - client.update_phrase_matcher(request) + client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_phrase_matcher(request) + client.list_phrase_matchers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13552,7 +13387,7 @@ def test_update_phrase_matcher_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_phrase_matcher_empty_call_async(): +async def test_list_phrase_matchers_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -13562,28 +13397,22 @@ async def test_update_phrase_matcher_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + contact_center_insights.ListPhraseMatchersResponse( + next_page_token="next_page_token_value", ) ) - response = await client.update_phrase_matcher() + response = await client.list_phrase_matchers() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest() + assert args[0] == contact_center_insights.ListPhraseMatchersRequest() @pytest.mark.asyncio -async def test_update_phrase_matcher_async_use_cached_wrapped_rpc( +async def test_list_phrase_matchers_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -13600,7 +13429,7 @@ async def test_update_phrase_matcher_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_phrase_matcher + client._client._transport.list_phrase_matchers in client._client._transport._wrapped_methods ) @@ -13608,16 +13437,16 @@ async def test_update_phrase_matcher_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_phrase_matcher + client._client._transport.list_phrase_matchers ] = mock_rpc request = {} - await client.update_phrase_matcher(request) + await client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.update_phrase_matcher(request) + await client.list_phrase_matchers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13625,9 +13454,9 @@ async def test_update_phrase_matcher_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_phrase_matcher_async( +async def test_list_phrase_matchers_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.UpdatePhraseMatcherRequest, + request_type=contact_center_insights.ListPhraseMatchersRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13640,61 +13469,49 @@ async def test_update_phrase_matcher_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + contact_center_insights.ListPhraseMatchersResponse( + next_page_token="next_page_token_value", ) ) - response = await client.update_phrase_matcher(request) + response = await client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdatePhraseMatcherRequest() + request = contact_center_insights.ListPhraseMatchersRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert isinstance(response, pagers.ListPhraseMatchersAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_update_phrase_matcher_async_from_dict(): - await test_update_phrase_matcher_async(request_type=dict) +async def test_list_phrase_matchers_async_from_dict(): + await test_list_phrase_matchers_async(request_type=dict) -def test_update_phrase_matcher_field_headers(): +def test_list_phrase_matchers_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdatePhraseMatcherRequest() + request = contact_center_insights.ListPhraseMatchersRequest() - request.phrase_matcher.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: - call.return_value = resources.PhraseMatcher() - client.update_phrase_matcher(request) + call.return_value = contact_center_insights.ListPhraseMatchersResponse() + client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -13705,30 +13522,30 @@ def test_update_phrase_matcher_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "phrase_matcher.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_phrase_matcher_field_headers_async(): +async def test_list_phrase_matchers_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdatePhraseMatcherRequest() + request = contact_center_insights.ListPhraseMatchersRequest() - request.phrase_matcher.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher() + contact_center_insights.ListPhraseMatchersResponse() ) - await client.update_phrase_matcher(request) + await client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13739,41 +13556,37 @@ async def test_update_phrase_matcher_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "phrase_matcher.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_phrase_matcher_flattened(): +def test_list_phrase_matchers_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher() + call.return_value = contact_center_insights.ListPhraseMatchersResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_phrase_matcher( - phrase_matcher=resources.PhraseMatcher(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_phrase_matchers( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].phrase_matcher - mock_val = resources.PhraseMatcher(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_update_phrase_matcher_flattened_error(): +def test_list_phrase_matchers_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13781,50 +13594,45 @@ def test_update_phrase_matcher_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_phrase_matcher( - contact_center_insights.UpdatePhraseMatcherRequest(), - phrase_matcher=resources.PhraseMatcher(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_phrase_matchers( + contact_center_insights.ListPhraseMatchersRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_update_phrase_matcher_flattened_async(): +async def test_list_phrase_matchers_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher() + call.return_value = contact_center_insights.ListPhraseMatchersResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher() + contact_center_insights.ListPhraseMatchersResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_phrase_matcher( - phrase_matcher=resources.PhraseMatcher(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.list_phrase_matchers( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].phrase_matcher - mock_val = resources.PhraseMatcher(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_phrase_matcher_flattened_error_async(): +async def test_list_phrase_matchers_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13832,21 +13640,222 @@ async def test_update_phrase_matcher_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_phrase_matcher( - contact_center_insights.UpdatePhraseMatcherRequest(), - phrase_matcher=resources.PhraseMatcher(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.list_phrase_matchers( + contact_center_insights.ListPhraseMatchersRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.CalculateStatsRequest, - dict, - ], +def test_list_phrase_matchers_pager(transport_name: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phrase_matchers), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + next_page_token="abc", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[], + next_page_token="def", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_phrase_matchers(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.PhraseMatcher) for i in results) + + +def test_list_phrase_matchers_pages(transport_name: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phrase_matchers), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + next_page_token="abc", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[], + next_page_token="def", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + ), + RuntimeError, + ) + pages = list(client.list_phrase_matchers(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_phrase_matchers_async_pager(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phrase_matchers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + next_page_token="abc", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[], + next_page_token="def", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_phrase_matchers( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.PhraseMatcher) for i in responses) + + +@pytest.mark.asyncio +async def test_list_phrase_matchers_async_pages(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phrase_matchers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + next_page_token="abc", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[], + next_page_token="def", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_phrase_matchers(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.DeletePhraseMatcherRequest, + dict, + ], ) -def test_calculate_stats(request_type, transport: str = "grpc"): +def test_delete_phrase_matcher(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13857,27 +13866,24 @@ def test_calculate_stats(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = contact_center_insights.CalculateStatsResponse( - average_turn_count=1931, - conversation_count=1955, - ) - response = client.calculate_stats(request) + call.return_value = None + response = client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.CalculateStatsRequest() + request = contact_center_insights.DeletePhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, contact_center_insights.CalculateStatsResponse) - assert response.average_turn_count == 1931 - assert response.conversation_count == 1955 + assert response is None -def test_calculate_stats_empty_call(): +def test_delete_phrase_matcher_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -13886,17 +13892,19 @@ def test_calculate_stats_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.calculate_stats() + client.delete_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CalculateStatsRequest() + assert args[0] == contact_center_insights.DeletePhraseMatcherRequest() -def test_calculate_stats_non_empty_request_with_auto_populated_field(): +def test_delete_phrase_matcher_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -13907,26 +13915,26 @@ def test_calculate_stats_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.CalculateStatsRequest( - location="location_value", - filter="filter_value", + request = contact_center_insights.DeletePhraseMatcherRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.calculate_stats(request=request) + client.delete_phrase_matcher(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CalculateStatsRequest( - location="location_value", - filter="filter_value", + assert args[0] == contact_center_insights.DeletePhraseMatcherRequest( + name="name_value", ) -def test_calculate_stats_use_cached_wrapped_rpc(): +def test_delete_phrase_matcher_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13940,21 +13948,26 @@ def test_calculate_stats_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.calculate_stats in client._transport._wrapped_methods + assert ( + client._transport.delete_phrase_matcher + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.calculate_stats] = mock_rpc + client._transport._wrapped_methods[ + client._transport.delete_phrase_matcher + ] = mock_rpc request = {} - client.calculate_stats(request) + client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.calculate_stats(request) + client.delete_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13962,7 +13975,7 @@ def test_calculate_stats_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_calculate_stats_empty_call_async(): +async def test_delete_phrase_matcher_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -13971,22 +13984,19 @@ async def test_calculate_stats_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateStatsResponse( - average_turn_count=1931, - conversation_count=1955, - ) - ) - response = await client.calculate_stats() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CalculateStatsRequest() + assert args[0] == contact_center_insights.DeletePhraseMatcherRequest() @pytest.mark.asyncio -async def test_calculate_stats_async_use_cached_wrapped_rpc( +async def test_delete_phrase_matcher_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14003,7 +14013,7 @@ async def test_calculate_stats_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.calculate_stats + client._client._transport.delete_phrase_matcher in client._client._transport._wrapped_methods ) @@ -14011,16 +14021,16 @@ async def test_calculate_stats_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.calculate_stats + client._client._transport.delete_phrase_matcher ] = mock_rpc request = {} - await client.calculate_stats(request) + await client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.calculate_stats(request) + await client.delete_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14028,9 +14038,9 @@ async def test_calculate_stats_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_calculate_stats_async( +async def test_delete_phrase_matcher_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.CalculateStatsRequest, + request_type=contact_center_insights.DeletePhraseMatcherRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14042,48 +14052,45 @@ async def test_calculate_stats_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateStatsResponse( - average_turn_count=1931, - conversation_count=1955, - ) - ) - response = await client.calculate_stats(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.CalculateStatsRequest() + request = contact_center_insights.DeletePhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, contact_center_insights.CalculateStatsResponse) - assert response.average_turn_count == 1931 - assert response.conversation_count == 1955 + assert response is None @pytest.mark.asyncio -async def test_calculate_stats_async_from_dict(): - await test_calculate_stats_async(request_type=dict) +async def test_delete_phrase_matcher_async_from_dict(): + await test_delete_phrase_matcher_async(request_type=dict) -def test_calculate_stats_field_headers(): +def test_delete_phrase_matcher_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CalculateStatsRequest() + request = contact_center_insights.DeletePhraseMatcherRequest() - request.location = "location_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: - call.return_value = contact_center_insights.CalculateStatsResponse() - client.calculate_stats(request) + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: + call.return_value = None + client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14094,28 +14101,28 @@ def test_calculate_stats_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "location=location_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_calculate_stats_field_headers_async(): +async def test_delete_phrase_matcher_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = contact_center_insights.CalculateStatsRequest() + request = contact_center_insights.DeletePhraseMatcherRequest() - request.location = "location_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateStatsResponse() - ) - await client.calculate_stats(request) + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14126,35 +14133,37 @@ async def test_calculate_stats_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "location=location_value", + "name=name_value", ) in kw["metadata"] -def test_calculate_stats_flattened(): +def test_delete_phrase_matcher_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.CalculateStatsResponse() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.calculate_stats( - location="location_value", + client.delete_phrase_matcher( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].location - mock_val = "location_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_calculate_stats_flattened_error(): +def test_delete_phrase_matcher_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14162,43 +14171,43 @@ def test_calculate_stats_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.calculate_stats( - contact_center_insights.CalculateStatsRequest(), - location="location_value", + client.delete_phrase_matcher( + contact_center_insights.DeletePhraseMatcherRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_calculate_stats_flattened_async(): +async def test_delete_phrase_matcher_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.CalculateStatsResponse() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateStatsResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.calculate_stats( - location="location_value", + response = await client.delete_phrase_matcher( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].location - mock_val = "location_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_calculate_stats_flattened_error_async(): +async def test_delete_phrase_matcher_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14206,20 +14215,20 @@ async def test_calculate_stats_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.calculate_stats( - contact_center_insights.CalculateStatsRequest(), - location="location_value", + await client.delete_phrase_matcher( + contact_center_insights.DeletePhraseMatcherRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetSettingsRequest, + contact_center_insights.UpdatePhraseMatcherRequest, dict, ], ) -def test_get_settings(request_type, transport: str = "grpc"): +def test_update_phrase_matcher(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14230,27 +14239,39 @@ def test_get_settings(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.Settings( + call.return_value = resources.PhraseMatcher( name="name_value", - language_code="language_code_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) - response = client.get_settings(request) + response = client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetSettingsRequest() + request = contact_center_insights.UpdatePhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.Settings) + assert isinstance(response, resources.PhraseMatcher) assert response.name == "name_value" - assert response.language_code == "language_code_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT -def test_get_settings_empty_call(): +def test_update_phrase_matcher_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -14259,17 +14280,19 @@ def test_get_settings_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_settings() + client.update_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetSettingsRequest() + assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest() -def test_get_settings_non_empty_request_with_auto_populated_field(): +def test_update_phrase_matcher_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -14280,24 +14303,22 @@ def test_get_settings_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.GetSettingsRequest( - name="name_value", - ) + request = contact_center_insights.UpdatePhraseMatcherRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_settings(request=request) + client.update_phrase_matcher(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetSettingsRequest( - name="name_value", - ) + assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest() -def test_get_settings_use_cached_wrapped_rpc(): +def test_update_phrase_matcher_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14311,21 +14332,26 @@ def test_get_settings_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_settings in client._transport._wrapped_methods + assert ( + client._transport.update_phrase_matcher + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_phrase_matcher + ] = mock_rpc request = {} - client.get_settings(request) + client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_settings(request) + client.update_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14333,7 +14359,7 @@ def test_get_settings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_settings_empty_call_async(): +async def test_update_phrase_matcher_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ContactCenterInsightsAsyncClient( @@ -14342,22 +14368,29 @@ async def test_get_settings_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Settings( + resources.PhraseMatcher( name="name_value", - language_code="language_code_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) ) - response = await client.get_settings() + response = await client.update_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetSettingsRequest() + assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest() @pytest.mark.asyncio -async def test_get_settings_async_use_cached_wrapped_rpc( +async def test_update_phrase_matcher_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14374,7 +14407,7 @@ async def test_get_settings_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_settings + client._client._transport.update_phrase_matcher in client._client._transport._wrapped_methods ) @@ -14382,16 +14415,16 @@ async def test_get_settings_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_settings + client._client._transport.update_phrase_matcher ] = mock_rpc request = {} - await client.get_settings(request) + await 
client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_settings(request) + await client.update_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14399,9 +14432,9 @@ async def test_get_settings_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_settings_async( +async def test_update_phrase_matcher_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.GetSettingsRequest, + request_type=contact_center_insights.UpdatePhraseMatcherRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14413,48 +14446,62 @@ async def test_get_settings_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Settings( + resources.PhraseMatcher( name="name_value", - language_code="language_code_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) ) - response = await client.get_settings(request) + response = await client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetSettingsRequest() + request = contact_center_insights.UpdatePhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Settings) + assert isinstance(response, resources.PhraseMatcher) assert response.name == "name_value" - assert response.language_code == "language_code_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT @pytest.mark.asyncio -async def test_get_settings_async_from_dict(): - await test_get_settings_async(request_type=dict) +async def test_update_phrase_matcher_async_from_dict(): + await test_update_phrase_matcher_async(request_type=dict) -def test_get_settings_field_headers(): +def test_update_phrase_matcher_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetSettingsRequest() + request = contact_center_insights.UpdatePhraseMatcherRequest() - request.name = "name_value" + request.phrase_matcher.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: - call.return_value = resources.Settings() - client.get_settings(request) + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: + call.return_value = resources.PhraseMatcher() + client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -14465,26 +14512,30 @@ def test_get_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "phrase_matcher.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_settings_field_headers_async(): +async def test_update_phrase_matcher_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetSettingsRequest() + request = contact_center_insights.UpdatePhraseMatcherRequest() - request.name = "name_value" + request.phrase_matcher.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) - await client.get_settings(request) + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.PhraseMatcher() + ) + await client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14495,35 +14546,41 @@ async def test_get_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "phrase_matcher.name=name_value", ) in kw["metadata"] -def test_get_settings_flattened(): +def test_update_phrase_matcher_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.Settings() + call.return_value = resources.PhraseMatcher() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_settings( - name="name_value", + client.update_phrase_matcher( + phrase_matcher=resources.PhraseMatcher(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].phrase_matcher + mock_val = resources.PhraseMatcher(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_get_settings_flattened_error(): +def test_update_phrase_matcher_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14531,41 +14588,50 @@ def test_get_settings_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_settings( - contact_center_insights.GetSettingsRequest(), - name="name_value", + client.update_phrase_matcher( + contact_center_insights.UpdatePhraseMatcherRequest(), + phrase_matcher=resources.PhraseMatcher(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_get_settings_flattened_async(): +async def test_update_phrase_matcher_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.Settings() + call.return_value = resources.PhraseMatcher() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.PhraseMatcher() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_settings( - name="name_value", + response = await client.update_phrase_matcher( + phrase_matcher=resources.PhraseMatcher(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].phrase_matcher + mock_val = resources.PhraseMatcher(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_get_settings_flattened_error_async(): +async def test_update_phrase_matcher_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14573,20 +14639,21 @@ async def test_get_settings_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_settings( - contact_center_insights.GetSettingsRequest(), - name="name_value", + await client.update_phrase_matcher( + contact_center_insights.UpdatePhraseMatcherRequest(), + phrase_matcher=resources.PhraseMatcher(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UpdateSettingsRequest, + contact_center_insights.CalculateStatsRequest, dict, ], ) -def test_update_settings(request_type, transport: str = "grpc"): +def test_calculate_stats(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14597,27 +14664,27 @@ def test_update_settings(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.Settings( - name="name_value", - language_code="language_code_value", + call.return_value = contact_center_insights.CalculateStatsResponse( + average_turn_count=1931, + conversation_count=1955, ) - response = client.update_settings(request) + response = client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdateSettingsRequest() + request = contact_center_insights.CalculateStatsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.Settings) - assert response.name == "name_value" - assert response.language_code == "language_code_value" + assert isinstance(response, contact_center_insights.CalculateStatsResponse) + assert response.average_turn_count == 1931 + assert response.conversation_count == 1955 -def test_update_settings_empty_call(): +def test_calculate_stats_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -14626,17 +14693,17 @@ def test_update_settings_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_settings() + client.calculate_stats() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateSettingsRequest() + assert args[0] == contact_center_insights.CalculateStatsRequest() -def test_update_settings_non_empty_request_with_auto_populated_field(): +def test_calculate_stats_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -14647,20 +14714,26 @@ def test_update_settings_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.UpdateSettingsRequest() + request = contact_center_insights.CalculateStatsRequest( + location="location_value", + filter="filter_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_settings(request=request) + client.calculate_stats(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateSettingsRequest() + assert args[0] == contact_center_insights.CalculateStatsRequest( + location="location_value", + filter="filter_value", + ) -def test_update_settings_use_cached_wrapped_rpc(): +def test_calculate_stats_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14674,21 +14747,21 @@ def test_update_settings_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_settings in client._transport._wrapped_methods + assert client._transport.calculate_stats in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc + client._transport._wrapped_methods[client._transport.calculate_stats] = mock_rpc request = {} - client.update_settings(request) + client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_settings(request) + client.calculate_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14696,7 +14769,7 @@ def test_update_settings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_settings_empty_call_async(): +async def test_calculate_stats_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ContactCenterInsightsAsyncClient( @@ -14705,22 +14778,22 @@ async def test_update_settings_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Settings( - name="name_value", - language_code="language_code_value", + contact_center_insights.CalculateStatsResponse( + average_turn_count=1931, + conversation_count=1955, ) ) - response = await client.update_settings() + response = await client.calculate_stats() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateSettingsRequest() + assert args[0] == contact_center_insights.CalculateStatsRequest() @pytest.mark.asyncio -async def test_update_settings_async_use_cached_wrapped_rpc( +async def test_calculate_stats_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14737,7 +14810,7 @@ async def test_update_settings_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_settings + client._client._transport.calculate_stats in client._client._transport._wrapped_methods ) @@ -14745,16 +14818,16 @@ async def test_update_settings_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_settings + client._client._transport.calculate_stats ] = mock_rpc request = {} - await client.update_settings(request) + await client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.update_settings(request) + await client.calculate_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14762,9 +14835,9 @@ async def test_update_settings_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_settings_async( +async def test_calculate_stats_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.UpdateSettingsRequest, + request_type=contact_center_insights.CalculateStatsRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14776,48 +14849,48 @@ async def test_update_settings_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Settings( - name="name_value", - language_code="language_code_value", + contact_center_insights.CalculateStatsResponse( + average_turn_count=1931, + conversation_count=1955, ) ) - response = await client.update_settings(request) + response = await client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdateSettingsRequest() + request = contact_center_insights.CalculateStatsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Settings) - assert response.name == "name_value" - assert response.language_code == "language_code_value" + assert isinstance(response, contact_center_insights.CalculateStatsResponse) + assert response.average_turn_count == 1931 + assert response.conversation_count == 1955 @pytest.mark.asyncio -async def test_update_settings_async_from_dict(): - await test_update_settings_async(request_type=dict) +async def test_calculate_stats_async_from_dict(): + await test_calculate_stats_async(request_type=dict) -def test_update_settings_field_headers(): +def test_calculate_stats_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdateSettingsRequest() + request = contact_center_insights.CalculateStatsRequest() - request.settings.name = "name_value" + request.location = "location_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: - call.return_value = resources.Settings() - client.update_settings(request) + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + call.return_value = contact_center_insights.CalculateStatsResponse() + client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -14828,26 +14901,28 @@ def test_update_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "settings.name=name_value", + "location=location_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_settings_field_headers_async(): +async def test_calculate_stats_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdateSettingsRequest() + request = contact_center_insights.CalculateStatsRequest() - request.settings.name = "name_value" + request.location = "location_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) - await client.update_settings(request) + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.CalculateStatsResponse() + ) + await client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14858,39 +14933,35 @@ async def test_update_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "settings.name=name_value", + "location=location_value", ) in kw["metadata"] -def test_update_settings_flattened(): +def test_calculate_stats_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.Settings() + call.return_value = contact_center_insights.CalculateStatsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_settings( - settings=resources.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.calculate_stats( + location="location_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].settings - mock_val = resources.Settings(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].location + mock_val = "location_value" assert arg == mock_val -def test_update_settings_flattened_error(): +def test_calculate_stats_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14898,46 +14969,43 @@ def test_update_settings_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_settings( - contact_center_insights.UpdateSettingsRequest(), - settings=resources.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.calculate_stats( + contact_center_insights.CalculateStatsRequest(), + location="location_value", ) @pytest.mark.asyncio -async def test_update_settings_flattened_async(): +async def test_calculate_stats_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.Settings() + call.return_value = contact_center_insights.CalculateStatsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.CalculateStatsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_settings( - settings=resources.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.calculate_stats( + location="location_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].settings - mock_val = resources.Settings(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].location + mock_val = "location_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_settings_flattened_error_async(): +async def test_calculate_stats_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14945,21 +15013,20 @@ async def test_update_settings_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_settings( - contact_center_insights.UpdateSettingsRequest(), - settings=resources.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.calculate_stats( + contact_center_insights.CalculateStatsRequest(), + location="location_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CreateViewRequest, + contact_center_insights.GetSettingsRequest, dict, ], ) -def test_create_view(request_type, transport: str = "grpc"): +def test_get_settings(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14970,29 +15037,27 @@ def test_create_view(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.View( + call.return_value = resources.Settings( name="name_value", - display_name="display_name_value", - value="value_value", + language_code="language_code_value", ) - response = client.create_view(request) + response = client.get_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.CreateViewRequest() + request = contact_center_insights.GetSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.View) + assert isinstance(response, resources.Settings) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.value == "value_value" + assert response.language_code == "language_code_value" -def test_create_view_empty_call(): +def test_get_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -15001,17 +15066,17 @@ def test_create_view_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_view() + client.get_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CreateViewRequest() + assert args[0] == contact_center_insights.GetSettingsRequest() -def test_create_view_non_empty_request_with_auto_populated_field(): +def test_get_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -15022,24 +15087,24 @@ def test_create_view_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.CreateViewRequest( - parent="parent_value", + request = contact_center_insights.GetSettingsRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_view(request=request) + client.get_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CreateViewRequest( - parent="parent_value", + assert args[0] == contact_center_insights.GetSettingsRequest( + name="name_value", ) -def test_create_view_use_cached_wrapped_rpc(): +def test_get_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15053,21 +15118,21 @@ def test_create_view_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_view in client._transport._wrapped_methods + assert client._transport.get_settings in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_view] = mock_rpc + client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc request = {} - client.create_view(request) + client.get_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_view(request) + client.get_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15075,7 +15140,7 @@ def test_create_view_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_view_empty_call_async(): +async def test_get_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ContactCenterInsightsAsyncClient( @@ -15084,23 +15149,22 @@ async def test_create_view_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.View( + resources.Settings( name="name_value", - display_name="display_name_value", - value="value_value", + language_code="language_code_value", ) ) - response = await client.create_view() + response = await client.get_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CreateViewRequest() + assert args[0] == contact_center_insights.GetSettingsRequest() @pytest.mark.asyncio -async def test_create_view_async_use_cached_wrapped_rpc( +async def test_get_settings_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -15117,7 +15181,7 @@ async def test_create_view_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_view + client._client._transport.get_settings in client._client._transport._wrapped_methods ) @@ -15125,16 +15189,16 @@ async def test_create_view_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_view + client._client._transport.get_settings ] = mock_rpc request = {} - await client.create_view(request) + await client.get_settings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.create_view(request) + await client.get_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15142,9 +15206,9 @@ async def test_create_view_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_view_async( +async def test_get_settings_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.CreateViewRequest, + request_type=contact_center_insights.GetSettingsRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15156,50 +15220,48 @@ async def test_create_view_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.View( + resources.Settings( name="name_value", - display_name="display_name_value", - value="value_value", + language_code="language_code_value", ) ) - response = await client.create_view(request) + response = await client.get_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.CreateViewRequest() + request = contact_center_insights.GetSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.View) + assert isinstance(response, resources.Settings) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.value == "value_value" + assert response.language_code == "language_code_value" @pytest.mark.asyncio -async def test_create_view_async_from_dict(): - await test_create_view_async(request_type=dict) +async def test_get_settings_async_from_dict(): + await test_get_settings_async(request_type=dict) -def test_create_view_field_headers(): +def test_get_settings_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CreateViewRequest() + request = contact_center_insights.GetSettingsRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: - call.return_value = resources.View() - client.create_view(request) + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value = resources.Settings() + client.get_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -15210,26 +15272,26 @@ def test_create_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_view_field_headers_async(): +async def test_get_settings_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = contact_center_insights.CreateViewRequest() + request = contact_center_insights.GetSettingsRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) - await client.create_view(request) + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) + await client.get_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -15240,39 +15302,35 @@ async def test_create_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_view_flattened(): +def test_get_settings_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.View() + call.return_value = resources.Settings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_view( - parent="parent_value", - view=resources.View(name="name_value"), + client.get_settings( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].view - mock_val = resources.View(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_view_flattened_error(): +def test_get_settings_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15280,46 +15338,41 @@ def test_create_view_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_view( - contact_center_insights.CreateViewRequest(), - parent="parent_value", - view=resources.View(name="name_value"), + client.get_settings( + contact_center_insights.GetSettingsRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_view_flattened_async(): +async def test_get_settings_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.View() + call.return_value = resources.Settings() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_view( - parent="parent_value", - view=resources.View(name="name_value"), + response = await client.get_settings( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].view - mock_val = resources.View(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_view_flattened_error_async(): +async def test_get_settings_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15327,21 +15380,20 @@ async def test_create_view_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_view( - contact_center_insights.CreateViewRequest(), - parent="parent_value", - view=resources.View(name="name_value"), + await client.get_settings( + contact_center_insights.GetSettingsRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetViewRequest, + contact_center_insights.UpdateSettingsRequest, dict, ], ) -def test_get_view(request_type, transport: str = "grpc"): +def test_update_settings(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15352,29 +15404,27 @@ def test_get_view(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.View( + call.return_value = resources.Settings( name="name_value", - display_name="display_name_value", - value="value_value", + language_code="language_code_value", ) - response = client.get_view(request) + response = client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetViewRequest() + request = contact_center_insights.UpdateSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.View) + assert isinstance(response, resources.Settings) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.value == "value_value" + assert response.language_code == "language_code_value" -def test_get_view_empty_call(): +def test_update_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -15383,17 +15433,17 @@ def test_get_view_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_view() + client.update_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetViewRequest() + assert args[0] == contact_center_insights.UpdateSettingsRequest() -def test_get_view_non_empty_request_with_auto_populated_field(): +def test_update_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -15404,24 +15454,20 @@ def test_get_view_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.GetViewRequest( - name="name_value", - ) + request = contact_center_insights.UpdateSettingsRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_view(request=request) + client.update_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetViewRequest( - name="name_value", - ) + assert args[0] == contact_center_insights.UpdateSettingsRequest() -def test_get_view_use_cached_wrapped_rpc(): +def test_update_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15435,21 +15481,21 @@ def test_get_view_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_view in client._transport._wrapped_methods + assert client._transport.update_settings in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_view] = mock_rpc + client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc request = {} - client.get_view(request) + client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_view(request) + client.update_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15457,7 +15503,7 @@ def test_get_view_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_view_empty_call_async(): +async def test_update_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ContactCenterInsightsAsyncClient( @@ -15466,23 +15512,24 @@ async def test_get_view_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.View( + resources.Settings( name="name_value", - display_name="display_name_value", - value="value_value", + language_code="language_code_value", ) ) - response = await client.get_view() + response = await client.update_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetViewRequest() + assert args[0] == contact_center_insights.UpdateSettingsRequest() @pytest.mark.asyncio -async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -15497,7 +15544,7 @@ async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn # Ensure method has been cached assert ( - client._client._transport.get_view + client._client._transport.update_settings in client._client._transport._wrapped_methods ) @@ -15505,16 +15552,16 @@ async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_view + client._client._transport.update_settings ] = mock_rpc request = {} - await client.get_view(request) + await client.update_settings(request) # Establish that the underlying 
gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_view(request) + await client.update_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15522,8 +15569,9 @@ async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn @pytest.mark.asyncio -async def test_get_view_async( - transport: str = "grpc_asyncio", request_type=contact_center_insights.GetViewRequest +async def test_update_settings_async( + transport: str = "grpc_asyncio", + request_type=contact_center_insights.UpdateSettingsRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15535,50 +15583,48 @@ async def test_get_view_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.View( + resources.Settings( name="name_value", - display_name="display_name_value", - value="value_value", + language_code="language_code_value", ) ) - response = await client.get_view(request) + response = await client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetViewRequest() + request = contact_center_insights.UpdateSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.View) + assert isinstance(response, resources.Settings) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.value == "value_value" + assert response.language_code == "language_code_value" @pytest.mark.asyncio -async def test_get_view_async_from_dict(): - await test_get_view_async(request_type=dict) +async def test_update_settings_async_from_dict(): + await test_update_settings_async(request_type=dict) -def test_get_view_field_headers(): +def test_update_settings_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetViewRequest() + request = contact_center_insights.UpdateSettingsRequest() - request.name = "name_value" + request.settings.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: - call.return_value = resources.View() - client.get_view(request) + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value = resources.Settings() + client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -15589,26 +15635,26 @@ def test_get_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "settings.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_view_field_headers_async(): +async def test_update_settings_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = contact_center_insights.GetViewRequest() + request = contact_center_insights.UpdateSettingsRequest() - request.name = "name_value" + request.settings.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) - await client.get_view(request) + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) + await client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -15619,35 +15665,39 @@ async def test_get_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "settings.name=name_value", ) in kw["metadata"] -def test_get_view_flattened(): +def test_update_settings_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.View() + call.return_value = resources.Settings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_view( - name="name_value", + client.update_settings( + settings=resources.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].settings + mock_val = resources.Settings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_get_view_flattened_error(): +def test_update_settings_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15655,41 +15705,46 @@ def test_get_view_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_view( - contact_center_insights.GetViewRequest(), - name="name_value", + client.update_settings( + contact_center_insights.UpdateSettingsRequest(), + settings=resources.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_get_view_flattened_async(): +async def test_update_settings_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.View() + call.return_value = resources.Settings() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_view( - name="name_value", + response = await client.update_settings( + settings=resources.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].settings + mock_val = resources.Settings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_get_view_flattened_error_async(): +async def test_update_settings_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15697,20 +15752,21 @@ async def test_get_view_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_view( - contact_center_insights.GetViewRequest(), - name="name_value", + await client.update_settings( + contact_center_insights.UpdateSettingsRequest(), + settings=resources.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListViewsRequest, + contact_center_insights.GetEncryptionSpecRequest, dict, ], ) -def test_list_views(request_type, transport: str = "grpc"): +def test_get_encryption_spec(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15721,25 +15777,29 @@ def test_list_views(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListViewsResponse( - next_page_token="next_page_token_value", + call.return_value = resources.EncryptionSpec( + name="name_value", + kms_key="kms_key_value", ) - response = client.list_views(request) + response = client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.ListViewsRequest() + request = contact_center_insights.GetEncryptionSpecRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListViewsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.EncryptionSpec) + assert response.name == "name_value" + assert response.kms_key == "kms_key_value" -def test_list_views_empty_call(): +def test_get_encryption_spec_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -15748,17 +15808,19 @@ def test_list_views_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_views() + client.get_encryption_spec() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListViewsRequest() + assert args[0] == contact_center_insights.GetEncryptionSpecRequest() -def test_list_views_non_empty_request_with_auto_populated_field(): +def test_get_encryption_spec_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -15769,26 +15831,26 @@ def test_list_views_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.ListViewsRequest( - parent="parent_value", - page_token="page_token_value", + request = contact_center_insights.GetEncryptionSpecRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_views(request=request) + client.get_encryption_spec(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListViewsRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == contact_center_insights.GetEncryptionSpecRequest( + name="name_value", ) -def test_list_views_use_cached_wrapped_rpc(): +def test_get_encryption_spec_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15802,21 +15864,25 @@ def test_list_views_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_views in client._transport._wrapped_methods + assert ( + client._transport.get_encryption_spec in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_views] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_encryption_spec + ] = mock_rpc request = {} - client.list_views(request) + client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_views(request) + client.get_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15824,7 +15890,7 @@ def test_list_views_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_views_empty_call_async(): +async def test_get_encryption_spec_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ContactCenterInsightsAsyncClient( @@ -15833,21 +15899,26 @@ async def test_list_views_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListViewsResponse( - next_page_token="next_page_token_value", + resources.EncryptionSpec( + name="name_value", + kms_key="kms_key_value", ) ) - response = await client.list_views() + response = await client.get_encryption_spec() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListViewsRequest() + assert args[0] == contact_center_insights.GetEncryptionSpecRequest() @pytest.mark.asyncio -async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_encryption_spec_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -15862,7 +15933,7 @@ async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.list_views + client._client._transport.get_encryption_spec in client._client._transport._wrapped_methods ) @@ -15870,16 +15941,16 @@ async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_as mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_views + client._client._transport.get_encryption_spec ] = mock_rpc request = {} - await client.list_views(request) + await 
client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_views(request) + await client.get_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15887,9 +15958,9 @@ async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_list_views_async( +async def test_get_encryption_spec_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.ListViewsRequest, + request_type=contact_center_insights.GetEncryptionSpecRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15901,46 +15972,52 @@ async def test_list_views_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListViewsResponse( - next_page_token="next_page_token_value", + resources.EncryptionSpec( + name="name_value", + kms_key="kms_key_value", ) ) - response = await client.list_views(request) + response = await client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.ListViewsRequest() + request = contact_center_insights.GetEncryptionSpecRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListViewsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.EncryptionSpec) + assert response.name == "name_value" + assert response.kms_key == "kms_key_value" @pytest.mark.asyncio -async def test_list_views_async_from_dict(): - await test_list_views_async(request_type=dict) +async def test_get_encryption_spec_async_from_dict(): + await test_get_encryption_spec_async(request_type=dict) -def test_list_views_field_headers(): +def test_get_encryption_spec_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.ListViewsRequest() + request = contact_center_insights.GetEncryptionSpecRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: - call.return_value = contact_center_insights.ListViewsResponse() - client.list_views(request) + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: + call.return_value = resources.EncryptionSpec() + client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -15951,28 +16028,30 @@ def test_list_views_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_views_field_headers_async(): +async def test_get_encryption_spec_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = contact_center_insights.ListViewsRequest() + request = contact_center_insights.GetEncryptionSpecRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListViewsResponse() + resources.EncryptionSpec() ) - await client.list_views(request) + await client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -15983,35 +16062,37 @@ async def test_list_views_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_views_flattened(): +def test_get_encryption_spec_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListViewsResponse() + call.return_value = resources.EncryptionSpec() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_views( - parent="parent_value", + client.get_encryption_spec( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_views_flattened_error(): +def test_get_encryption_spec_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16019,43 +16100,45 @@ def test_list_views_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_views( - contact_center_insights.ListViewsRequest(), - parent="parent_value", + client.get_encryption_spec( + contact_center_insights.GetEncryptionSpecRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_views_flattened_async(): +async def test_get_encryption_spec_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListViewsResponse() + call.return_value = resources.EncryptionSpec() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListViewsResponse() + resources.EncryptionSpec() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_views( - parent="parent_value", + response = await client.get_encryption_spec( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_views_flattened_error_async(): +async def test_get_encryption_spec_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16063,351 +16146,164 @@ async def test_list_views_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_views( - contact_center_insights.ListViewsRequest(), - parent="parent_value", + await client.get_encryption_spec( + contact_center_insights.GetEncryptionSpecRequest(), + name="name_value", ) -def test_list_views_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.InitializeEncryptionSpecRequest, + dict, + ], +) +def test_initialize_encryption_spec(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - resources.View(), - ], - next_page_token="abc", - ), - contact_center_insights.ListViewsResponse( - views=[], - next_page_token="def", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - ], - ), - RuntimeError, - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_views(request={}, retry=retry, timeout=timeout) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.initialize_encryption_spec(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = contact_center_insights.InitializeEncryptionSpecRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.View) for i in results) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) -def test_list_views_pages(transport_name: str = "grpc"): +def test_initialize_encryption_spec_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - resources.View(), - ], - next_page_token="abc", - ), - contact_center_insights.ListViewsResponse( - views=[], - next_page_token="def", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - ], - ), - RuntimeError, + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
) - pages = list(client.list_views(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.initialize_encryption_spec() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.InitializeEncryptionSpecRequest() -@pytest.mark.asyncio -async def test_list_views_async_pager(): - client = ContactCenterInsightsAsyncClient( +def test_initialize_encryption_spec_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = contact_center_insights.InitializeEncryptionSpecRequest() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + type(client.transport.initialize_encryption_spec), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - resources.View(), - ], - next_page_token="abc", - ), - contact_center_insights.ListViewsResponse( - views=[], - next_page_token="def", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
) - async_pager = await client.list_views( - request={}, + client.initialize_encryption_spec(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.InitializeEncryptionSpecRequest() + + +def test_initialize_encryption_spec_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - assert len(responses) == 6 - assert all(isinstance(i, resources.View) for i in responses) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.initialize_encryption_spec + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.initialize_encryption_spec + ] = mock_rpc + request = {} + client.initialize_encryption_spec(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.initialize_encryption_spec(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_views_async_pages(): +async def test_initialize_encryption_spec_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + type(client.transport.initialize_encryption_spec), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - resources.View(), - ], - next_page_token="abc", - ), - contact_center_insights.ListViewsResponse( - views=[], - next_page_token="def", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_views(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.UpdateViewRequest, - dict, - ], -) -def 
test_update_view(request_type, transport: str = "grpc"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = resources.View( - name="name_value", - display_name="display_name_value", - value="value_value", - ) - response = client.update_view(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdateViewRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.View) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.value == "value_value" - - -def test_update_view_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.update_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateViewRequest() - - -def test_update_view_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = contact_center_insights.UpdateViewRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_view(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateViewRequest() - - -def test_update_view_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_view in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[client._transport.update_view] = mock_rpc - request = {} - client.update_view(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_view(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_update_view_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ContactCenterInsightsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.View( - name="name_value", - display_name="display_name_value", - value="value_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.update_view() + response = await client.initialize_encryption_spec() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateViewRequest() + assert args[0] == contact_center_insights.InitializeEncryptionSpecRequest() @pytest.mark.asyncio -async def test_update_view_async_use_cached_wrapped_rpc( +async def test_initialize_encryption_spec_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -16424,7 +16320,7 @@ async def test_update_view_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_view + client._client._transport.initialize_encryption_spec in client._client._transport._wrapped_methods ) @@ -16432,16 +16328,21 
@@ async def test_update_view_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_view + client._client._transport.initialize_encryption_spec ] = mock_rpc request = {} - await client.update_view(request) + await client.initialize_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.update_view(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.initialize_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16449,9 +16350,9 @@ async def test_update_view_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_view_async( +async def test_initialize_encryption_spec_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.UpdateViewRequest, + request_type=contact_center_insights.InitializeEncryptionSpecRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16463,50 +16364,47 @@ async def test_update_view_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.View( - name="name_value", - display_name="display_name_value", - value="value_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.update_view(request) + response = await client.initialize_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdateViewRequest() + request = contact_center_insights.InitializeEncryptionSpecRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.View) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.value == "value_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_update_view_async_from_dict(): - await test_update_view_async(request_type=dict) +async def test_initialize_encryption_spec_async_from_dict(): + await test_initialize_encryption_spec_async(request_type=dict) -def test_update_view_field_headers(): +def test_initialize_encryption_spec_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdateViewRequest() + request = contact_center_insights.InitializeEncryptionSpecRequest() - request.view.name = "name_value" + request.encryption_spec.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_view), "__call__") as call: - call.return_value = resources.View() - client.update_view(request) + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.initialize_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -16517,26 +16415,30 @@ def test_update_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "view.name=name_value", + "encryption_spec.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_view_field_headers_async(): +async def test_initialize_encryption_spec_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdateViewRequest() + request = contact_center_insights.InitializeEncryptionSpecRequest() - request.view.name = "name_value" + request.encryption_spec.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) - await client.update_view(request) + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.initialize_encryption_spec(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -16547,39 +16449,37 @@ async def test_update_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "view.name=name_value", + "encryption_spec.name=name_value", ) in kw["metadata"] -def test_update_view_flattened(): +def test_initialize_encryption_spec_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.View() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_view( - view=resources.View(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.initialize_encryption_spec( + encryption_spec=resources.EncryptionSpec(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].view - mock_val = resources.View(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].encryption_spec + mock_val = resources.EncryptionSpec(name="name_value") assert arg == mock_val -def test_update_view_flattened_error(): +def test_initialize_encryption_spec_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16587,46 +16487,45 @@ def test_update_view_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_view( - contact_center_insights.UpdateViewRequest(), - view=resources.View(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.initialize_encryption_spec( + contact_center_insights.InitializeEncryptionSpecRequest(), + encryption_spec=resources.EncryptionSpec(name="name_value"), ) @pytest.mark.asyncio -async def test_update_view_flattened_async(): +async def test_initialize_encryption_spec_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.View() + call.return_value = operations_pb2.Operation(name="operations/op") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_view( - view=resources.View(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.initialize_encryption_spec( + encryption_spec=resources.EncryptionSpec(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].view - mock_val = resources.View(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].encryption_spec + mock_val = resources.EncryptionSpec(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_update_view_flattened_error_async(): +async def test_initialize_encryption_spec_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16634,21 +16533,20 @@ async def test_update_view_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_view( - contact_center_insights.UpdateViewRequest(), - view=resources.View(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.initialize_encryption_spec( + contact_center_insights.InitializeEncryptionSpecRequest(), + encryption_spec=resources.EncryptionSpec(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeleteViewRequest, + contact_center_insights.CreateViewRequest, dict, ], ) -def test_delete_view(request_type, transport: str = "grpc"): +def test_create_view(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16659,22 +16557,29 @@ def test_delete_view(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.delete_view(request) + call.return_value = resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + response = client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.DeleteViewRequest() + request = contact_center_insights.CreateViewRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, resources.View) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.value == "value_value" -def test_delete_view_empty_call(): +def test_create_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -16683,17 +16588,17 @@ def test_delete_view_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_view() + client.create_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeleteViewRequest() + assert args[0] == contact_center_insights.CreateViewRequest() -def test_delete_view_non_empty_request_with_auto_populated_field(): +def test_create_view_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = ContactCenterInsightsClient( @@ -16704,24 +16609,24 @@ def test_delete_view_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.DeleteViewRequest( - name="name_value", + request = contact_center_insights.CreateViewRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_view(request=request) + client.create_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeleteViewRequest( - name="name_value", + assert args[0] == contact_center_insights.CreateViewRequest( + parent="parent_value", ) -def test_delete_view_use_cached_wrapped_rpc(): +def test_create_view_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16735,21 +16640,21 @@ def test_delete_view_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_view in client._transport._wrapped_methods + assert client._transport.create_view in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.delete_view] = mock_rpc + client._transport._wrapped_methods[client._transport.create_view] = mock_rpc request = {} - client.delete_view(request) + client.create_view(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_view(request) + client.create_view(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16757,7 +16662,7 @@ def test_delete_view_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_view_empty_call_async(): +async def test_create_view_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -16766,17 +16671,23 @@ async def test_delete_view_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_view() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + ) + response = await client.create_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeleteViewRequest() + assert args[0] == contact_center_insights.CreateViewRequest() @pytest.mark.asyncio -async def test_delete_view_async_use_cached_wrapped_rpc( +async def test_create_view_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -16793,7 +16704,7 @@ async def test_delete_view_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_view + client._client._transport.create_view in client._client._transport._wrapped_methods ) @@ -16801,16 +16712,16 @@ async def test_delete_view_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_view + client._client._transport.create_view ] = mock_rpc request = {} - await client.delete_view(request) + await client.create_view(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.delete_view(request) + await client.create_view(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16818,9 +16729,9 @@ async def test_delete_view_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_view_async( +async def test_create_view_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.DeleteViewRequest, + request_type=contact_center_insights.CreateViewRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16832,41 +16743,50 @@ async def test_delete_view_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_view(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + ) + response = await client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.DeleteViewRequest() + request = contact_center_insights.CreateViewRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.View) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.value == "value_value" @pytest.mark.asyncio -async def test_delete_view_async_from_dict(): - await test_delete_view_async(request_type=dict) +async def test_create_view_async_from_dict(): + await test_create_view_async(request_type=dict) -def test_delete_view_field_headers(): +def test_create_view_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.DeleteViewRequest() + request = contact_center_insights.CreateViewRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: - call.return_value = None - client.delete_view(request) + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value = resources.View() + client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -16877,26 +16797,26 @@ def test_delete_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_view_field_headers_async(): +async def test_create_view_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = contact_center_insights.DeleteViewRequest() + request = contact_center_insights.CreateViewRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_view(request) + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + await client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -16907,35 +16827,39 @@ async def test_delete_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_delete_view_flattened(): +def test_create_view_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = resources.View() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_view( - name="name_value", + client.create_view( + parent="parent_value", + view=resources.View(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].view + mock_val = resources.View(name="name_value") assert arg == mock_val -def test_delete_view_flattened_error(): +def test_create_view_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16943,41 +16867,46 @@ def test_delete_view_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_view( - contact_center_insights.DeleteViewRequest(), - name="name_value", + client.create_view( + contact_center_insights.CreateViewRequest(), + parent="parent_value", + view=resources.View(name="name_value"), ) @pytest.mark.asyncio -async def test_delete_view_flattened_async(): +async def test_create_view_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = resources.View() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_view( - name="name_value", + response = await client.create_view( + parent="parent_value", + view=resources.View(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].view + mock_val = resources.View(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_delete_view_flattened_error_async(): +async def test_create_view_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16985,72 +16914,2599 @@ async def test_delete_view_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_view( - contact_center_insights.DeleteViewRequest(), - name="name_value", + await client.create_view( + contact_center_insights.CreateViewRequest(), + parent="parent_value", + view=resources.View(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CreateConversationRequest, + contact_center_insights.GetViewRequest, dict, ], ) -def test_create_conversation_rest(request_type): +def test_get_view(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["conversation"] = { - "call_metadata": {"customer_channel": 1706, "agent_channel": 1351}, - "expire_time": {"seconds": 751, "nanos": 543}, - "ttl": {"seconds": 751, "nanos": 543}, - "name": "name_value", - "data_source": { - "gcs_source": { - "audio_uri": "audio_uri_value", - "transcript_uri": "transcript_uri_value", - }, - "dialogflow_source": { - "dialogflow_conversation": "dialogflow_conversation_value", - "audio_uri": "audio_uri_value", - }, - }, - "create_time": {}, - "update_time": {}, - "start_time": {}, - 
"language_code": "language_code_value", - "agent_id": "agent_id_value", - "labels": {}, - "quality_metadata": { - "customer_satisfaction_rating": 3005, - "wait_duration": {}, - "menu_path": "menu_path_value", - "agent_info": [ - { - "agent_id": "agent_id_value", - "display_name": "display_name_value", - "team": "team_value", - "disposition_code": "disposition_code_value", - } - ], - }, - "transcript": { - "transcript_segments": [ - { - "message_time": {}, - "text": "text_value", - "confidence": 0.1038, - "words": [ - { - "start_offset": {}, - "end_offset": {}, - "word": "word_value", + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + response = client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = contact_center_insights.GetViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.View) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.value == "value_value" + + +def test_get_view_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.GetViewRequest() + + +def test_get_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = contact_center_insights.GetViewRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.GetViewRequest( + name="name_value", + ) + + +def test_get_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_view] = mock_rpc + request = {} + client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + ) + response = await client.get_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.GetViewRequest() + + +@pytest.mark.asyncio +async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_view + ] = mock_rpc + + request = {} + await client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_view_async( + transport: str = "grpc_asyncio", request_type=contact_center_insights.GetViewRequest +): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + ) + response = await client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = contact_center_insights.GetViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.View) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.value == "value_value" + + +@pytest.mark.asyncio +async def test_get_view_async_from_dict(): + await test_get_view_async(request_type=dict) + + +def test_get_view_field_headers(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = contact_center_insights.GetViewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value = resources.View() + client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_view_field_headers_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = contact_center_insights.GetViewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + await client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_view_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.View() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_view( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_view_flattened_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_view( + contact_center_insights.GetViewRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_view_flattened_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.View() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_view( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_view_flattened_error_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_view( + contact_center_insights.GetViewRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.ListViewsRequest, + dict, + ], +) +def test_list_views(request_type, transport: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = contact_center_insights.ListViewsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = contact_center_insights.ListViewsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListViewsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_views_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_views() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.ListViewsRequest() + + +def test_list_views_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = contact_center_insights.ListViewsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_views(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.ListViewsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_views_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_views in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_views] = mock_rpc + request = {} + client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_views(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_views_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListViewsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_views() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.ListViewsRequest() + + +@pytest.mark.asyncio +async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_views + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_views + ] = mock_rpc + + request = {} + await client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_views(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_views_async( + transport: str = "grpc_asyncio", + request_type=contact_center_insights.ListViewsRequest, +): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListViewsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = contact_center_insights.ListViewsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListViewsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_views_async_from_dict(): + await test_list_views_async(request_type=dict) + + +def test_list_views_field_headers(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = contact_center_insights.ListViewsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value = contact_center_insights.ListViewsResponse() + client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_views_field_headers_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = contact_center_insights.ListViewsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListViewsResponse() + ) + await client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_views_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = contact_center_insights.ListViewsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_views( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_views_flattened_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_views( + contact_center_insights.ListViewsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_views_flattened_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = contact_center_insights.ListViewsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListViewsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_views( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_views_flattened_error_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_views( + contact_center_insights.ListViewsRequest(), + parent="parent_value", + ) + + +def test_list_views_pager(transport_name: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + resources.View(), + ], + next_page_token="abc", + ), + contact_center_insights.ListViewsResponse( + views=[], + next_page_token="def", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_views(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.View) for i in results) + + +def test_list_views_pages(transport_name: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + resources.View(), + ], + next_page_token="abc", + ), + contact_center_insights.ListViewsResponse( + views=[], + next_page_token="def", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + ], + ), + RuntimeError, + ) + pages = list(client.list_views(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_views_async_pager(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + resources.View(), + ], + next_page_token="abc", + ), + contact_center_insights.ListViewsResponse( + views=[], + next_page_token="def", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_views( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.View) for i in responses) + + +@pytest.mark.asyncio +async def test_list_views_async_pages(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + resources.View(), + ], + next_page_token="abc", + ), + contact_center_insights.ListViewsResponse( + views=[], + next_page_token="def", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_views(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.UpdateViewRequest, + dict, + ], +) +def test_update_view(request_type, transport: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + response = client.update_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = contact_center_insights.UpdateViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.View) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.value == "value_value" + + +def test_update_view_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.UpdateViewRequest() + + +def test_update_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = contact_center_insights.UpdateViewRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.UpdateViewRequest() + + +def test_update_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_view] = mock_rpc + request = {} + client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + ) + response = await client.update_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.UpdateViewRequest() + + +@pytest.mark.asyncio +async def test_update_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_view + ] = mock_rpc + + request = {} + await client.update_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.update_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_view_async( + transport: str = "grpc_asyncio", + request_type=contact_center_insights.UpdateViewRequest, +): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + ) + response = await client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = contact_center_insights.UpdateViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.View) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.value == "value_value" + + +@pytest.mark.asyncio +async def test_update_view_async_from_dict(): + await test_update_view_async(request_type=dict) + + +def test_update_view_field_headers(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = contact_center_insights.UpdateViewRequest() + + request.view.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value = resources.View() + client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "view.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_view_field_headers_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = contact_center_insights.UpdateViewRequest() + + request.view.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + await client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "view.name=name_value", + ) in kw["metadata"] + + +def test_update_view_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.View() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_view( + view=resources.View(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].view + mock_val = resources.View(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_view_flattened_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_view( + contact_center_insights.UpdateViewRequest(), + view=resources.View(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_view_flattened_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.View() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_view( + view=resources.View(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].view + mock_val = resources.View(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_view_flattened_error_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_view( + contact_center_insights.UpdateViewRequest(), + view=resources.View(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.DeleteViewRequest, + dict, + ], +) +def test_delete_view(request_type, transport: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = contact_center_insights.DeleteViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_view_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.DeleteViewRequest() + + +def test_delete_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = contact_center_insights.DeleteViewRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.DeleteViewRequest( + name="name_value", + ) + + +def test_delete_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_view] = mock_rpc + request = {} + client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.DeleteViewRequest() + + +@pytest.mark.asyncio +async def test_delete_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_view + ] = mock_rpc + + request = {} + await client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_view_async( + transport: str = "grpc_asyncio", + request_type=contact_center_insights.DeleteViewRequest, +): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = contact_center_insights.DeleteViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_view_async_from_dict(): + await test_delete_view_async(request_type=dict) + + +def test_delete_view_field_headers(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = contact_center_insights.DeleteViewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value = None + client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_view_field_headers_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = contact_center_insights.DeleteViewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_view_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_view( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_view_flattened_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_view( + contact_center_insights.DeleteViewRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_view_flattened_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_view( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_view_flattened_error_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_view( + contact_center_insights.DeleteViewRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.CreateConversationRequest, + dict, + ], +) +def test_create_conversation_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["conversation"] = { + "call_metadata": {"customer_channel": 1706, "agent_channel": 1351}, + "expire_time": {"seconds": 751, "nanos": 543}, + "ttl": {"seconds": 751, "nanos": 543}, + "name": "name_value", + "data_source": { + "gcs_source": { + "audio_uri": "audio_uri_value", + "transcript_uri": "transcript_uri_value", + }, + "dialogflow_source": { + "dialogflow_conversation": "dialogflow_conversation_value", + "audio_uri": "audio_uri_value", + }, + }, + "create_time": {}, + "update_time": {}, + "start_time": {}, + "language_code": "language_code_value", + "agent_id": "agent_id_value", + "labels": {}, + "quality_metadata": { + "customer_satisfaction_rating": 3005, + "wait_duration": {}, + "menu_path": "menu_path_value", + "agent_info": [ + { + "agent_id": "agent_id_value", + "display_name": "display_name_value", + "team": "team_value", + "disposition_code": "disposition_code_value", + } + ], + }, + "metadata_json": "metadata_json_value", + "transcript": { + "transcript_segments": [ + { + "message_time": {}, + "text": "text_value", + "confidence": 0.1038, + "words": [ + { + "start_offset": {}, + "end_offset": {}, + "word": "word_value", + "confidence": 0.1038, + } + ], + "language_code": "language_code_value", + "channel_tag": 1140, + "segment_participant": { + "dialogflow_participant_name": "dialogflow_participant_name_value", + "user_id": "user_id_value", + "dialogflow_participant": "dialogflow_participant_value", + 
"obfuscated_external_user_id": "obfuscated_external_user_id_value", + "role": 1, + }, + "dialogflow_segment_metadata": { + "smart_reply_allowlist_covered": True + }, + "sentiment": {"magnitude": 0.9580000000000001, "score": 0.54}, + } + ] + }, + "medium": 1, + "duration": {}, + "turn_count": 1105, + "latest_analysis": { + "name": "name_value", + "request_time": {}, + "create_time": {}, + "analysis_result": { + "call_analysis_metadata": { + "annotations": [ + { + "interruption_data": {}, + "sentiment_data": {}, + "silence_data": {}, + "hold_data": {}, + "entity_mention_data": { + "entity_unique_id": "entity_unique_id_value", + "type_": 1, + "sentiment": {}, + }, + "intent_match_data": { + "intent_unique_id": "intent_unique_id_value" + }, + "phrase_match_data": { + "phrase_matcher": "phrase_matcher_value", + "display_name": "display_name_value", + }, + "issue_match_data": { + "issue_assignment": { + "issue": "issue_value", + "score": 0.54, + "display_name": "display_name_value", + } + }, + "channel_tag": 1140, + "annotation_start_boundary": { + "word_index": 1075, + "transcript_index": 1729, + }, + "annotation_end_boundary": {}, + } + ], + "entities": {}, + "sentiments": [{"channel_tag": 1140, "sentiment_data": {}}], + "silence": {"silence_duration": {}, "silence_percentage": 0.1888}, + "intents": {}, + "phrase_matchers": {}, + "issue_model_result": { + "issue_model": "issue_model_value", + "issues": {}, + }, + }, + "end_time": {}, + }, + "annotator_selector": { + "run_interruption_annotator": True, + "run_silence_annotator": True, + "run_phrase_matcher_annotator": True, + "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], + "run_sentiment_annotator": True, + "run_entity_annotator": True, + "run_intent_annotator": True, + "run_issue_model_annotator": True, + "issue_models": ["issue_models_value1", "issue_models_value2"], + "run_summarization_annotator": True, + "summarization_config": { + "conversation_profile": "conversation_profile_value", + 
"summarization_model": 1, + }, + }, + }, + "latest_summary": { + "text": "text_value", + "text_sections": {}, + "confidence": 0.1038, + "metadata": {}, + "answer_record": "answer_record_value", + "conversation_model": "conversation_model_value", + }, + "runtime_annotations": [ + { + "article_suggestion": { + "title": "title_value", + "uri": "uri_value", + "confidence_score": 0.1673, + "metadata": {}, + "query_record": "query_record_value", + "source": "source_value", + }, + "faq_answer": { + "answer": "answer_value", + "confidence_score": 0.1673, + "question": "question_value", + "metadata": {}, + "query_record": "query_record_value", + "source": "source_value", + }, + "smart_reply": { + "reply": "reply_value", + "confidence_score": 0.1673, + "metadata": {}, + "query_record": "query_record_value", + }, + "smart_compose_suggestion": { + "suggestion": "suggestion_value", + "confidence_score": 0.1673, + "metadata": {}, + "query_record": "query_record_value", + }, + "dialogflow_interaction": { + "dialogflow_intent_id": "dialogflow_intent_id_value", + "confidence": 0.1038, + }, + "conversation_summarization_suggestion": {}, + "annotation_id": "annotation_id_value", + "create_time": {}, + "start_boundary": {}, + "end_boundary": {}, + "answer_feedback": { + "correctness_level": 1, + "clicked": True, + "displayed": True, + }, + "user_input": { + "query": "query_value", + "generator_name": "generator_name_value", + "query_source": 1, + }, + } + ], + "dialogflow_intents": {}, + "obfuscated_user_id": "obfuscated_user_id_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.CreateConversationRequest.meta.fields[ + "conversation" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["conversation"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime 
version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["conversation"][field])): + del request_init["conversation"][field][i][subfield] + else: + del request_init["conversation"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Conversation( + name="name_value", + language_code="language_code_value", + agent_id="agent_id_value", + metadata_json="metadata_json_value", + medium=resources.Conversation.Medium.PHONE_CALL, + turn_count=1105, + obfuscated_user_id="obfuscated_user_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_conversation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Conversation) + assert response.name == "name_value" + assert response.language_code == "language_code_value" + assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" + assert response.medium == resources.Conversation.Medium.PHONE_CALL + assert response.turn_count == 1105 + assert response.obfuscated_user_id == "obfuscated_user_id_value" + + +def test_create_conversation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_conversation in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_conversation + ] = mock_rpc + + request = {} + client.create_conversation(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_conversation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_conversation_rest_required_fields( + request_type=contact_center_insights.CreateConversationRequest, +): + transport_class = transports.ContactCenterInsightsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_conversation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("conversation_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Conversation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_conversation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_conversation_rest_unset_required_fields(): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_conversation._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("conversationId",)) + & set( + ( + "parent", + "conversation", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_conversation_rest_interceptors(null_interceptor): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ContactCenterInsightsRestInterceptor(), + ) + client = ContactCenterInsightsClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_create_conversation" + ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "pre_create_conversation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = contact_center_insights.CreateConversationRequest.pb( + contact_center_insights.CreateConversationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Conversation.to_json( + resources.Conversation() + ) + + request = contact_center_insights.CreateConversationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Conversation() + + client.create_conversation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_conversation_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.CreateConversationRequest, +): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_conversation(request) + + +def test_create_conversation_rest_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Conversation() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + conversation=resources.Conversation( + call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) + ), + conversation_id="conversation_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_conversation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/conversations" + % client.transport._host, + args[1], + ) + + +def test_create_conversation_rest_flattened_error(transport: str = "rest"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_conversation( + contact_center_insights.CreateConversationRequest(), + parent="parent_value", + conversation=resources.Conversation( + call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) + ), + conversation_id="conversation_id_value", + ) + + +def test_create_conversation_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.UploadConversationRequest, + dict, + ], +) +def test_upload_conversation_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.upload_conversation(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_upload_conversation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.upload_conversation in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.upload_conversation + ] = mock_rpc + + request = {} + client.upload_conversation(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.upload_conversation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_upload_conversation_rest_required_fields( + request_type=contact_center_insights.UploadConversationRequest, +): + transport_class = transports.ContactCenterInsightsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).upload_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).upload_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.upload_conversation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_upload_conversation_rest_unset_required_fields(): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.upload_conversation._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "conversation", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_upload_conversation_rest_interceptors(null_interceptor): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ContactCenterInsightsRestInterceptor(), + ) + client = ContactCenterInsightsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_upload_conversation" + ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "pre_upload_conversation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = contact_center_insights.UploadConversationRequest.pb( + contact_center_insights.UploadConversationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = contact_center_insights.UploadConversationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.upload_conversation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_upload_conversation_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.UploadConversationRequest, +): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.upload_conversation(request) + + +def test_upload_conversation_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.UpdateConversationRequest, + dict, + ], +) +def test_update_conversation_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "conversation": { + "name": "projects/sample1/locations/sample2/conversations/sample3" + } + } + request_init["conversation"] = { + "call_metadata": {"customer_channel": 1706, "agent_channel": 1351}, + "expire_time": {"seconds": 751, "nanos": 543}, + "ttl": {"seconds": 751, "nanos": 543}, + "name": "projects/sample1/locations/sample2/conversations/sample3", + "data_source": { + "gcs_source": { + "audio_uri": "audio_uri_value", + "transcript_uri": "transcript_uri_value", + }, + "dialogflow_source": { + "dialogflow_conversation": "dialogflow_conversation_value", + "audio_uri": "audio_uri_value", + }, + }, + "create_time": {}, + "update_time": {}, + "start_time": {}, + "language_code": "language_code_value", + "agent_id": "agent_id_value", + "labels": {}, + "quality_metadata": { + "customer_satisfaction_rating": 3005, + "wait_duration": {}, + "menu_path": "menu_path_value", + "agent_info": [ + { + "agent_id": "agent_id_value", + "display_name": "display_name_value", + "team": "team_value", + "disposition_code": "disposition_code_value", + } + ], + }, + "metadata_json": "metadata_json_value", + "transcript": { + "transcript_segments": [ + 
{ + "message_time": {}, + "text": "text_value", + "confidence": 0.1038, + "words": [ + { + "start_offset": {}, + "end_offset": {}, + "word": "word_value", "confidence": 0.1038, } ], @@ -17114,6 +19570,7 @@ def test_create_conversation_rest(request_type): ], "entities": {}, "sentiments": [{"channel_tag": 1140, "sentiment_data": {}}], + "silence": {"silence_duration": {}, "silence_percentage": 0.1888}, "intents": {}, "phrase_matchers": {}, "issue_model_result": { @@ -17192,116 +19649,766 @@ def test_create_conversation_rest(request_type): "clicked": True, "displayed": True, }, + "user_input": { + "query": "query_value", + "generator_name": "generator_name_value", + "query_source": 1, + }, + } + ], + "dialogflow_intents": {}, + "obfuscated_user_id": "obfuscated_user_id_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.UpdateConversationRequest.meta.fields[ + "conversation" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["conversation"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["conversation"][field])): + del request_init["conversation"][field][i][subfield] + else: + del 
request_init["conversation"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Conversation( + name="name_value", + language_code="language_code_value", + agent_id="agent_id_value", + metadata_json="metadata_json_value", + medium=resources.Conversation.Medium.PHONE_CALL, + turn_count=1105, + obfuscated_user_id="obfuscated_user_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_conversation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Conversation) + assert response.name == "name_value" + assert response.language_code == "language_code_value" + assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" + assert response.medium == resources.Conversation.Medium.PHONE_CALL + assert response.turn_count == 1105 + assert response.obfuscated_user_id == "obfuscated_user_id_value" + + +def test_update_conversation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_conversation in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_conversation + ] = mock_rpc + + request = {} + client.update_conversation(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_conversation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_conversation_rest_required_fields( + request_type=contact_center_insights.UpdateConversationRequest, +): + transport_class = transports.ContactCenterInsightsRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_conversation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Conversation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_conversation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_conversation_rest_unset_required_fields(): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_conversation._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("conversation",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_conversation_rest_interceptors(null_interceptor): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ContactCenterInsightsRestInterceptor(), + ) + client = ContactCenterInsightsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_update_conversation" + ) as post, mock.patch.object( + 
transports.ContactCenterInsightsRestInterceptor, "pre_update_conversation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = contact_center_insights.UpdateConversationRequest.pb( + contact_center_insights.UpdateConversationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Conversation.to_json( + resources.Conversation() + ) + + request = contact_center_insights.UpdateConversationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Conversation() + + client.update_conversation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_conversation_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.UpdateConversationRequest, +): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "conversation": { + "name": "projects/sample1/locations/sample2/conversations/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_conversation(request) + + +def test_update_conversation_rest_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Conversation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "conversation": { + "name": "projects/sample1/locations/sample2/conversations/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + conversation=resources.Conversation( + call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_conversation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{conversation.name=projects/*/locations/*/conversations/*}" + % client.transport._host, + args[1], + ) + + +def test_update_conversation_rest_flattened_error(transport: str = "rest"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_conversation( + contact_center_insights.UpdateConversationRequest(), + conversation=resources.Conversation( + call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_conversation_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.GetConversationRequest, + dict, + ], +) +def test_get_conversation_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.Conversation( + name="name_value", + language_code="language_code_value", + agent_id="agent_id_value", + metadata_json="metadata_json_value", + medium=resources.Conversation.Medium.PHONE_CALL, + turn_count=1105, + obfuscated_user_id="obfuscated_user_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_conversation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Conversation) + assert response.name == "name_value" + assert response.language_code == "language_code_value" + assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" + assert response.medium == resources.Conversation.Medium.PHONE_CALL + assert response.turn_count == 1105 + assert response.obfuscated_user_id == "obfuscated_user_id_value" + + +def test_get_conversation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_conversation in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_conversation + ] = mock_rpc + + request = {} + client.get_conversation(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_conversation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_conversation_rest_required_fields( + request_type=contact_center_insights.GetConversationRequest, +): + transport_class = transports.ContactCenterInsightsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_conversation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = resources.Conversation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } - ], - "dialogflow_intents": {}, - "obfuscated_user_id": "obfuscated_user_id_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + transcode.return_value = transcode_result - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.CreateConversationRequest.meta.fields[ - "conversation" - ] + response_value = Response() + response_value.status_code = 200 - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + response = client.get_conversation(request) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["conversation"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value +def test_get_conversation_rest_unset_required_fields(): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": 
is_repeated, - } - ) + unset_fields = transport.get_conversation._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_conversation_rest_interceptors(null_interceptor): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ContactCenterInsightsRestInterceptor(), + ) + client = ContactCenterInsightsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_get_conversation" + ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "pre_get_conversation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = contact_center_insights.GetConversationRequest.pb( + contact_center_insights.GetConversationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Conversation.to_json( + resources.Conversation() + ) + + request = contact_center_insights.GetConversationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Conversation() + + client.get_conversation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_conversation_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.GetConversationRequest +): + client = ContactCenterInsightsClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_conversation(request) + + +def test_get_conversation_rest_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Conversation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/conversations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_conversation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/conversations/*}" + % client.transport._host, + args[1], + ) + + +def test_get_conversation_rest_flattened_error(transport: str = "rest"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_conversation( + contact_center_insights.GetConversationRequest(), + name="name_value", + ) + + +def test_get_conversation_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.ListConversationsRequest, + dict, + ], +) +def test_list_conversations_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["conversation"][field])): - del request_init["conversation"][field][i][subfield] - else: - del request_init["conversation"][field][subfield] + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Conversation( - name="name_value", - language_code="language_code_value", - agent_id="agent_id_value", - medium=resources.Conversation.Medium.PHONE_CALL, - turn_count=1105, - obfuscated_user_id="obfuscated_user_id_value", + return_value = contact_center_insights.ListConversationsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) + return_value = contact_center_insights.ListConversationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_conversation(request) + response = client.list_conversations(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Conversation) - assert response.name == "name_value" - assert response.language_code == "language_code_value" - assert response.agent_id == "agent_id_value" - assert response.medium == resources.Conversation.Medium.PHONE_CALL - assert response.turn_count == 1105 - assert response.obfuscated_user_id == "obfuscated_user_id_value" + assert isinstance(response, pagers.ListConversationsPager) + assert response.next_page_token == "next_page_token_value" -def test_create_conversation_rest_use_cached_wrapped_rpc(): +def test_list_conversations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17316,7 +20423,7 @@ def test_create_conversation_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_conversation in client._transport._wrapped_methods + client._transport.list_conversations in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -17325,24 +20432,24 @@ def test_create_conversation_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_conversation + client._transport.list_conversations ] = mock_rpc request = {} - client.create_conversation(request) + client.list_conversations(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_conversation(request) + client.list_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_conversation_rest_required_fields( - request_type=contact_center_insights.CreateConversationRequest, +def test_list_conversations_rest_required_fields( + request_type=contact_center_insights.ListConversationsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -17358,7 +20465,7 @@ def test_create_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_conversation._get_unset_required_fields(jsonified_request) + ).list_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -17367,9 +20474,17 @@ def test_create_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_conversation._get_unset_required_fields(jsonified_request) + ).list_conversations._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("conversation_id",)) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "view", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -17383,7 +20498,7 @@ def test_create_conversation_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Conversation() + return_value = contact_center_insights.ListConversationsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17395,48 +20510,52 @@ def test_create_conversation_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) + return_value = contact_center_insights.ListConversationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_conversation(request) + response = client.list_conversations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_conversation_rest_unset_required_fields(): +def test_list_conversations_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_conversation._get_unset_required_fields({}) + unset_fields = transport.list_conversations._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("conversationId",)) - & set( + set( ( - "parent", - "conversation", + "filter", + "orderBy", + "pageSize", + "pageToken", + "view", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_conversation_rest_interceptors(null_interceptor): +def test_list_conversations_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17449,14 +20568,14 @@ def 
test_create_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_create_conversation" + transports.ContactCenterInsightsRestInterceptor, "post_list_conversations" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_create_conversation" + transports.ContactCenterInsightsRestInterceptor, "pre_list_conversations" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.CreateConversationRequest.pb( - contact_center_insights.CreateConversationRequest() + pb_message = contact_center_insights.ListConversationsRequest.pb( + contact_center_insights.ListConversationsRequest() ) transcode.return_value = { "method": "post", @@ -17468,19 +20587,21 @@ def test_create_conversation_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Conversation.to_json( - resources.Conversation() + req.return_value._content = ( + contact_center_insights.ListConversationsResponse.to_json( + contact_center_insights.ListConversationsResponse() + ) ) - request = contact_center_insights.CreateConversationRequest() + request = contact_center_insights.ListConversationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Conversation() + post.return_value = contact_center_insights.ListConversationsResponse() - client.create_conversation( + client.list_conversations( request, metadata=[ ("key", "val"), @@ -17492,9 +20613,9 @@ def test_create_conversation_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_conversation_rest_bad_request( +def test_list_conversations_rest_bad_request( transport: str = "rest", - 
request_type=contact_center_insights.CreateConversationRequest, + request_type=contact_center_insights.ListConversationsRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17514,10 +20635,10 @@ def test_create_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_conversation(request) + client.list_conversations(request) -def test_create_conversation_rest_flattened(): +def test_list_conversations_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17526,7 +20647,7 @@ def test_create_conversation_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Conversation() + return_value = contact_center_insights.ListConversationsResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -17534,10 +20655,6 @@ def test_create_conversation_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - conversation=resources.Conversation( - call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) - ), - conversation_id="conversation_id_value", ) mock_args.update(sample_request) @@ -17545,12 +20662,14 @@ def test_create_conversation_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) + return_value = contact_center_insights.ListConversationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = 
response_value - client.create_conversation(**mock_args) + client.list_conversations(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -17563,7 +20682,7 @@ def test_create_conversation_rest_flattened(): ) -def test_create_conversation_rest_flattened_error(transport: str = "rest"): +def test_list_conversations_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17572,58 +20691,112 @@ def test_create_conversation_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_conversation( - contact_center_insights.CreateConversationRequest(), + client.list_conversations( + contact_center_insights.ListConversationsRequest(), parent="parent_value", - conversation=resources.Conversation( - call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) - ), - conversation_id="conversation_id_value", ) -def test_create_conversation_rest_error(): +def test_list_conversations_rest_pager(transport: str = "rest"): client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + contact_center_insights.ListConversationsResponse( + conversations=[ + resources.Conversation(), + resources.Conversation(), + resources.Conversation(), + ], + next_page_token="abc", + ), + contact_center_insights.ListConversationsResponse( + conversations=[], + next_page_token="def", + ), + contact_center_insights.ListConversationsResponse( + conversations=[ + resources.Conversation(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListConversationsResponse( + conversations=[ + resources.Conversation(), + resources.Conversation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + contact_center_insights.ListConversationsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_conversations(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Conversation) for i in results) + + pages = list(client.list_conversations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UploadConversationRequest, + contact_center_insights.DeleteConversationRequest, dict, ], ) -def test_upload_conversation_rest(request_type): +def test_delete_conversation_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy 
transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.upload_conversation(request) + response = client.delete_conversation(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert response is None -def test_upload_conversation_rest_use_cached_wrapped_rpc(): +def test_delete_conversation_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17638,7 +20811,7 @@ def test_upload_conversation_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.upload_conversation in client._transport._wrapped_methods + client._transport.delete_conversation in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -17647,33 +20820,29 @@ def test_upload_conversation_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.upload_conversation + client._transport.delete_conversation ] = mock_rpc request = {} - client.upload_conversation(request) + client.delete_conversation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.upload_conversation(request) + client.delete_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_upload_conversation_rest_required_fields( - request_type=contact_center_insights.UploadConversationRequest, +def test_delete_conversation_rest_required_fields( + request_type=contact_center_insights.DeleteConversationRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17684,21 +20853,23 @@ def test_upload_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).upload_conversation._get_unset_required_fields(jsonified_request) + ).delete_conversation._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).upload_conversation._get_unset_required_fields(jsonified_request) + ).delete_conversation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("force",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17707,7 +20878,7 @@ def test_upload_conversation_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17719,45 +20890,36 @@ def test_upload_conversation_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.upload_conversation(request) + response = client.delete_conversation(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_upload_conversation_rest_unset_required_fields(): +def test_delete_conversation_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.upload_conversation._get_unset_required_fields({}) - assert set(unset_fields) 
== ( - set(()) - & set( - ( - "parent", - "conversation", - ) - ) - ) + unset_fields = transport.delete_conversation._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_upload_conversation_rest_interceptors(null_interceptor): +def test_delete_conversation_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17770,16 +20932,11 @@ def test_upload_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_upload_conversation" - ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_upload_conversation" + transports.ContactCenterInsightsRestInterceptor, "pre_delete_conversation" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = contact_center_insights.UploadConversationRequest.pb( - contact_center_insights.UploadConversationRequest() + pb_message = contact_center_insights.DeleteConversationRequest.pb( + contact_center_insights.DeleteConversationRequest() ) transcode.return_value = { "method": "post", @@ -17791,19 +20948,15 @@ def test_upload_conversation_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - request = contact_center_insights.UploadConversationRequest() + request = contact_center_insights.DeleteConversationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - 
client.upload_conversation( + client.delete_conversation( request, metadata=[ ("key", "val"), @@ -17812,12 +20965,11 @@ def test_upload_conversation_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_upload_conversation_rest_bad_request( +def test_delete_conversation_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.UploadConversationRequest, + request_type=contact_center_insights.DeleteConversationRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17825,7 +20977,7 @@ def test_upload_conversation_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17837,10 +20989,67 @@ def test_upload_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.upload_conversation(request) + client.delete_conversation(request) -def test_upload_conversation_rest_error(): +def test_delete_conversation_rest_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/conversations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_conversation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/conversations/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_conversation_rest_flattened_error(transport: str = "rest"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_conversation( + contact_center_insights.DeleteConversationRequest(), + name="name_value", + ) + + +def test_delete_conversation_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -17849,11 +21058,11 @@ def test_upload_conversation_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UpdateConversationRequest, + contact_center_insights.CreateAnalysisRequest, dict, ], ) -def test_update_conversation_rest(request_type): +def test_create_analysis_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17861,209 +21070,88 @@ def test_update_conversation_rest(request_type): # send a request that will satisfy transcoding request_init = { - "conversation": { - "name": "projects/sample1/locations/sample2/conversations/sample3" - } + "parent": "projects/sample1/locations/sample2/conversations/sample3" } - request_init["conversation"] = { - "call_metadata": {"customer_channel": 1706, "agent_channel": 1351}, - "expire_time": {"seconds": 751, "nanos": 543}, - "ttl": {"seconds": 751, "nanos": 543}, - "name": "projects/sample1/locations/sample2/conversations/sample3", - "data_source": { - "gcs_source": { - "audio_uri": "audio_uri_value", - "transcript_uri": "transcript_uri_value", - }, - "dialogflow_source": { - "dialogflow_conversation": "dialogflow_conversation_value", - "audio_uri": "audio_uri_value", - }, - }, + request_init["analysis"] = { + "name": "name_value", + "request_time": {"seconds": 751, "nanos": 543}, "create_time": {}, - "update_time": {}, - "start_time": {}, - "language_code": "language_code_value", - "agent_id": "agent_id_value", - "labels": {}, - "quality_metadata": { - "customer_satisfaction_rating": 3005, - "wait_duration": {}, - "menu_path": "menu_path_value", - "agent_info": [ - { - "agent_id": "agent_id_value", - "display_name": 
"display_name_value", - "team": "team_value", - "disposition_code": "disposition_code_value", - } - ], - }, - "transcript": { - "transcript_segments": [ - { - "message_time": {}, - "text": "text_value", - "confidence": 0.1038, - "words": [ - { - "start_offset": {}, - "end_offset": {}, - "word": "word_value", - "confidence": 0.1038, - } - ], - "language_code": "language_code_value", - "channel_tag": 1140, - "segment_participant": { - "dialogflow_participant_name": "dialogflow_participant_name_value", - "user_id": "user_id_value", - "dialogflow_participant": "dialogflow_participant_value", - "obfuscated_external_user_id": "obfuscated_external_user_id_value", - "role": 1, - }, - "dialogflow_segment_metadata": { - "smart_reply_allowlist_covered": True - }, - "sentiment": {"magnitude": 0.9580000000000001, "score": 0.54}, - } - ] - }, - "medium": 1, - "duration": {}, - "turn_count": 1105, - "latest_analysis": { - "name": "name_value", - "request_time": {}, - "create_time": {}, - "analysis_result": { - "call_analysis_metadata": { - "annotations": [ - { - "interruption_data": {}, - "sentiment_data": {}, - "silence_data": {}, - "hold_data": {}, - "entity_mention_data": { - "entity_unique_id": "entity_unique_id_value", - "type_": 1, - "sentiment": {}, - }, - "intent_match_data": { - "intent_unique_id": "intent_unique_id_value" - }, - "phrase_match_data": { - "phrase_matcher": "phrase_matcher_value", - "display_name": "display_name_value", - }, - "issue_match_data": { - "issue_assignment": { - "issue": "issue_value", - "score": 0.54, - "display_name": "display_name_value", - } - }, - "channel_tag": 1140, - "annotation_start_boundary": { - "word_index": 1075, - "transcript_index": 1729, - }, - "annotation_end_boundary": {}, - } - ], - "entities": {}, - "sentiments": [{"channel_tag": 1140, "sentiment_data": {}}], - "intents": {}, - "phrase_matchers": {}, - "issue_model_result": { - "issue_model": "issue_model_value", - "issues": {}, - }, + "analysis_result": { + 
"call_analysis_metadata": { + "annotations": [ + { + "interruption_data": {}, + "sentiment_data": { + "magnitude": 0.9580000000000001, + "score": 0.54, + }, + "silence_data": {}, + "hold_data": {}, + "entity_mention_data": { + "entity_unique_id": "entity_unique_id_value", + "type_": 1, + "sentiment": {}, + }, + "intent_match_data": { + "intent_unique_id": "intent_unique_id_value" + }, + "phrase_match_data": { + "phrase_matcher": "phrase_matcher_value", + "display_name": "display_name_value", + }, + "issue_match_data": { + "issue_assignment": { + "issue": "issue_value", + "score": 0.54, + "display_name": "display_name_value", + } + }, + "channel_tag": 1140, + "annotation_start_boundary": { + "word_index": 1075, + "transcript_index": 1729, + }, + "annotation_end_boundary": {}, + } + ], + "entities": {}, + "sentiments": [{"channel_tag": 1140, "sentiment_data": {}}], + "silence": { + "silence_duration": {"seconds": 751, "nanos": 543}, + "silence_percentage": 0.1888, }, - "end_time": {}, - }, - "annotator_selector": { - "run_interruption_annotator": True, - "run_silence_annotator": True, - "run_phrase_matcher_annotator": True, - "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], - "run_sentiment_annotator": True, - "run_entity_annotator": True, - "run_intent_annotator": True, - "run_issue_model_annotator": True, - "issue_models": ["issue_models_value1", "issue_models_value2"], - "run_summarization_annotator": True, - "summarization_config": { - "conversation_profile": "conversation_profile_value", - "summarization_model": 1, + "intents": {}, + "phrase_matchers": {}, + "issue_model_result": { + "issue_model": "issue_model_value", + "issues": {}, }, }, + "end_time": {}, }, - "latest_summary": { - "text": "text_value", - "text_sections": {}, - "confidence": 0.1038, - "metadata": {}, - "answer_record": "answer_record_value", - "conversation_model": "conversation_model_value", + "annotator_selector": { + "run_interruption_annotator": True, + 
"run_silence_annotator": True, + "run_phrase_matcher_annotator": True, + "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], + "run_sentiment_annotator": True, + "run_entity_annotator": True, + "run_intent_annotator": True, + "run_issue_model_annotator": True, + "issue_models": ["issue_models_value1", "issue_models_value2"], + "run_summarization_annotator": True, + "summarization_config": { + "conversation_profile": "conversation_profile_value", + "summarization_model": 1, + }, }, - "runtime_annotations": [ - { - "article_suggestion": { - "title": "title_value", - "uri": "uri_value", - "confidence_score": 0.1673, - "metadata": {}, - "query_record": "query_record_value", - "source": "source_value", - }, - "faq_answer": { - "answer": "answer_value", - "confidence_score": 0.1673, - "question": "question_value", - "metadata": {}, - "query_record": "query_record_value", - "source": "source_value", - }, - "smart_reply": { - "reply": "reply_value", - "confidence_score": 0.1673, - "metadata": {}, - "query_record": "query_record_value", - }, - "smart_compose_suggestion": { - "suggestion": "suggestion_value", - "confidence_score": 0.1673, - "metadata": {}, - "query_record": "query_record_value", - }, - "dialogflow_interaction": { - "dialogflow_intent_id": "dialogflow_intent_id_value", - "confidence": 0.1038, - }, - "conversation_summarization_suggestion": {}, - "annotation_id": "annotation_id_value", - "create_time": {}, - "start_boundary": {}, - "end_boundary": {}, - "answer_feedback": { - "correctness_level": 1, - "clicked": True, - "displayed": True, - }, - } - ], - "dialogflow_intents": {}, - "obfuscated_user_id": "obfuscated_user_id_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.UpdateConversationRequest.meta.fields[ - "conversation" - ] + test_field = contact_center_insights.CreateAnalysisRequest.meta.fields["analysis"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -18091,7 +21179,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["conversation"].items(): # pragma: NO COVER + for field, value in request_init["analysis"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -18121,46 +21209,31 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["conversation"][field])): - del request_init["conversation"][field][i][subfield] + for i in range(0, len(request_init["analysis"][field])): + del request_init["analysis"][field][i][subfield] else: - del request_init["conversation"][field][subfield] + del request_init["analysis"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Conversation( - name="name_value", - language_code="language_code_value", - agent_id="agent_id_value", - medium=resources.Conversation.Medium.PHONE_CALL, - turn_count=1105, - obfuscated_user_id="obfuscated_user_id_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_conversation(request) + response = client.create_analysis(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Conversation) - assert response.name == "name_value" - assert response.language_code == "language_code_value" - assert response.agent_id == "agent_id_value" - assert response.medium == resources.Conversation.Medium.PHONE_CALL - assert response.turn_count == 1105 - assert response.obfuscated_user_id == "obfuscated_user_id_value" + assert response.operation.name == "operations/spam" -def test_update_conversation_rest_use_cached_wrapped_rpc(): +def test_create_analysis_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18174,38 +21247,39 @@ def test_update_conversation_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_conversation in client._transport._wrapped_methods - ) + assert client._transport.create_analysis in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # 
operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_conversation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_analysis] = mock_rpc request = {} - client.update_conversation(request) + client.create_analysis(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_conversation(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_conversation_rest_required_fields( - request_type=contact_center_insights.UpdateConversationRequest, +def test_create_analysis_rest_required_fields( + request_type=contact_center_insights.CreateAnalysisRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18216,19 +21290,21 @@ def test_update_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_conversation._get_unset_required_fields(jsonified_request) + ).create_analysis._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_conversation._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).create_analysis._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18237,7 +21313,7 @@ def test_update_conversation_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Conversation() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18249,7 +21325,7 @@ def test_update_conversation_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -18257,32 +21333,37 @@ def test_update_conversation_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_conversation(request) + response = client.create_analysis(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_conversation_rest_unset_required_fields(): +def test_create_analysis_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - 
unset_fields = transport.update_conversation._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("conversation",))) + unset_fields = transport.create_analysis._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "analysis", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_conversation_rest_interceptors(null_interceptor): +def test_create_analysis_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18295,14 +21376,16 @@ def test_update_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_update_conversation" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_create_analysis" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_update_conversation" + transports.ContactCenterInsightsRestInterceptor, "pre_create_analysis" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.UpdateConversationRequest.pb( - contact_center_insights.UpdateConversationRequest() + pb_message = contact_center_insights.CreateAnalysisRequest.pb( + contact_center_insights.CreateAnalysisRequest() ) transcode.return_value = { "method": "post", @@ -18314,19 +21397,19 @@ def test_update_conversation_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Conversation.to_json( - resources.Conversation() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = 
contact_center_insights.UpdateConversationRequest() + request = contact_center_insights.CreateAnalysisRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Conversation() + post.return_value = operations_pb2.Operation() - client.update_conversation( + client.create_analysis( request, metadata=[ ("key", "val"), @@ -18338,9 +21421,8 @@ def test_update_conversation_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_conversation_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.UpdateConversationRequest, +def test_create_analysis_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.CreateAnalysisRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18349,9 +21431,7 @@ def test_update_conversation_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "conversation": { - "name": "projects/sample1/locations/sample2/conversations/sample3" - } + "parent": "projects/sample1/locations/sample2/conversations/sample3" } request = request_type(**request_init) @@ -18364,10 +21444,10 @@ def test_update_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_conversation(request) + client.create_analysis(request) -def test_update_conversation_rest_flattened(): +def test_create_analysis_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18376,47 +21456,41 @@ def test_update_conversation_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Conversation() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "conversation": { - "name": "projects/sample1/locations/sample2/conversations/sample3" - } + "parent": "projects/sample1/locations/sample2/conversations/sample3" } # get truthy value for each flattened field mock_args = dict( - conversation=resources.Conversation( - call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + analysis=resources.Analysis(name="name_value"), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_conversation(**mock_args) + client.create_analysis(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{conversation.name=projects/*/locations/*/conversations/*}" + "%s/v1/{parent=projects/*/locations/*/conversations/*}/analyses" % client.transport._host, args[1], ) -def test_update_conversation_rest_flattened_error(transport: str = "rest"): +def test_create_analysis_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18425,16 +21499,14 @@ def test_update_conversation_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_conversation( - contact_center_insights.UpdateConversationRequest(), - conversation=resources.Conversation( - call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_analysis( + contact_center_insights.CreateAnalysisRequest(), + parent="parent_value", + analysis=resources.Analysis(name="name_value"), ) -def test_update_conversation_rest_error(): +def test_create_analysis_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -18443,54 +21515,46 @@ def test_update_conversation_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetConversationRequest, + contact_center_insights.GetAnalysisRequest, dict, ], ) -def test_get_conversation_rest(request_type): +def test_get_analysis_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Conversation( + return_value = resources.Analysis( name="name_value", - language_code="language_code_value", - agent_id="agent_id_value", - medium=resources.Conversation.Medium.PHONE_CALL, - turn_count=1105, - obfuscated_user_id="obfuscated_user_id_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) + return_value = resources.Analysis.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_conversation(request) + response = client.get_analysis(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Conversation) + assert isinstance(response, resources.Analysis) assert response.name == "name_value" - assert response.language_code == "language_code_value" - assert response.agent_id == "agent_id_value" - assert response.medium == resources.Conversation.Medium.PHONE_CALL - assert response.turn_count == 1105 - assert response.obfuscated_user_id == "obfuscated_user_id_value" -def test_get_conversation_rest_use_cached_wrapped_rpc(): +def test_get_analysis_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18504,32 +21568,30 @@ def test_get_conversation_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_conversation in client._transport._wrapped_methods + assert client._transport.get_analysis in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # 
operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_conversation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_analysis] = mock_rpc request = {} - client.get_conversation(request) + client.get_analysis(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_conversation(request) + client.get_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_conversation_rest_required_fields( - request_type=contact_center_insights.GetConversationRequest, +def test_get_analysis_rest_required_fields( + request_type=contact_center_insights.GetAnalysisRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -18545,7 +21607,7 @@ def test_get_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_conversation._get_unset_required_fields(jsonified_request) + ).get_analysis._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -18554,9 +21616,7 @@ def test_get_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_conversation._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("view",)) + ).get_analysis._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -18570,7 +21630,7 @@ def test_get_conversation_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = resources.Conversation() + return_value = resources.Analysis() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18591,30 +21651,30 @@ def test_get_conversation_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) + return_value = resources.Analysis.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_conversation(request) + response = client.get_analysis(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_conversation_rest_unset_required_fields(): +def test_get_analysis_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_conversation._get_unset_required_fields({}) - assert set(unset_fields) == (set(("view",)) & set(("name",))) + unset_fields = transport.get_analysis._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_conversation_rest_interceptors(null_interceptor): +def test_get_analysis_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18627,14 +21687,14 @@ def test_get_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_get_conversation" + 
transports.ContactCenterInsightsRestInterceptor, "post_get_analysis" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_get_conversation" + transports.ContactCenterInsightsRestInterceptor, "pre_get_analysis" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.GetConversationRequest.pb( - contact_center_insights.GetConversationRequest() + pb_message = contact_center_insights.GetAnalysisRequest.pb( + contact_center_insights.GetAnalysisRequest() ) transcode.return_value = { "method": "post", @@ -18646,19 +21706,17 @@ def test_get_conversation_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Conversation.to_json( - resources.Conversation() - ) + req.return_value._content = resources.Analysis.to_json(resources.Analysis()) - request = contact_center_insights.GetConversationRequest() + request = contact_center_insights.GetAnalysisRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Conversation() + post.return_value = resources.Analysis() - client.get_conversation( + client.get_analysis( request, metadata=[ ("key", "val"), @@ -18670,8 +21728,8 @@ def test_get_conversation_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_conversation_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.GetConversationRequest +def test_get_analysis_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.GetAnalysisRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18679,7 +21737,9 @@ def test_get_conversation_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} + 
request_init = { + "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18691,10 +21751,10 @@ def test_get_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_conversation(request) + client.get_analysis(request) -def test_get_conversation_rest_flattened(): +def test_get_analysis_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18703,11 +21763,11 @@ def test_get_conversation_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Conversation() + return_value = resources.Analysis() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/conversations/sample3" + "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" } # get truthy value for each flattened field @@ -18720,25 +21780,25 @@ def test_get_conversation_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) + return_value = resources.Analysis.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_conversation(**mock_args) + client.get_analysis(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/conversations/*}" + "%s/v1/{name=projects/*/locations/*/conversations/*/analyses/*}" % client.transport._host, args[1], ) -def test_get_conversation_rest_flattened_error(transport: str = "rest"): +def test_get_analysis_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18747,13 +21807,13 @@ def test_get_conversation_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_conversation( - contact_center_insights.GetConversationRequest(), + client.get_analysis( + contact_center_insights.GetAnalysisRequest(), name="name_value", ) -def test_get_conversation_rest_error(): +def test_get_analysis_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -18762,24 +21822,26 @@ def test_get_conversation_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListConversationsRequest, + contact_center_insights.ListAnalysesRequest, dict, ], ) -def test_list_conversations_rest(request_type): +def test_list_analyses_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "parent": "projects/sample1/locations/sample2/conversations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.ListConversationsResponse( + return_value = contact_center_insights.ListAnalysesResponse( next_page_token="next_page_token_value", ) @@ -18787,21 +21849,19 @@ def test_list_conversations_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.ListConversationsResponse.pb( - return_value - ) + return_value = contact_center_insights.ListAnalysesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_conversations(request) + response = client.list_analyses(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListConversationsPager) + assert isinstance(response, pagers.ListAnalysesPager) assert response.next_page_token == "next_page_token_value" -def test_list_conversations_rest_use_cached_wrapped_rpc(): +def test_list_analyses_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18815,34 +21875,30 @@ def test_list_conversations_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_conversations in client._transport._wrapped_methods - ) + assert client._transport.list_analyses in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_conversations - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_analyses] = mock_rpc request = {} - client.list_conversations(request) + client.list_analyses(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_conversations(request) + client.list_analyses(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_conversations_rest_required_fields( - request_type=contact_center_insights.ListConversationsRequest, +def test_list_analyses_rest_required_fields( + request_type=contact_center_insights.ListAnalysesRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -18858,7 +21914,7 @@ def test_list_conversations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_conversations._get_unset_required_fields(jsonified_request) + ).list_analyses._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -18867,14 +21923,13 @@ def test_list_conversations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_conversations._get_unset_required_fields(jsonified_request) + ).list_analyses._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( "filter", "page_size", "page_token", - "view", ) ) jsonified_request.update(unset_fields) @@ -18890,7 +21945,7 @@ def test_list_conversations_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.ListConversationsResponse() + return_value = contact_center_insights.ListAnalysesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18911,34 +21966,31 @@ def test_list_conversations_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.ListConversationsResponse.pb( - return_value - ) + return_value = contact_center_insights.ListAnalysesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_conversations(request) + response = client.list_analyses(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_conversations_rest_unset_required_fields(): +def test_list_analyses_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_conversations._get_unset_required_fields({}) + unset_fields = transport.list_analyses._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( "filter", "pageSize", "pageToken", - "view", ) ) & set(("parent",)) @@ -18946,7 +21998,7 @@ def test_list_conversations_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_conversations_rest_interceptors(null_interceptor): +def test_list_analyses_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18959,14 +22011,14 @@ def test_list_conversations_rest_interceptors(null_interceptor): ) as req, 
mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_list_conversations" + transports.ContactCenterInsightsRestInterceptor, "post_list_analyses" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_list_conversations" + transports.ContactCenterInsightsRestInterceptor, "pre_list_analyses" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.ListConversationsRequest.pb( - contact_center_insights.ListConversationsRequest() + pb_message = contact_center_insights.ListAnalysesRequest.pb( + contact_center_insights.ListAnalysesRequest() ) transcode.return_value = { "method": "post", @@ -18979,20 +22031,20 @@ def test_list_conversations_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() req.return_value._content = ( - contact_center_insights.ListConversationsResponse.to_json( - contact_center_insights.ListConversationsResponse() + contact_center_insights.ListAnalysesResponse.to_json( + contact_center_insights.ListAnalysesResponse() ) ) - request = contact_center_insights.ListConversationsRequest() + request = contact_center_insights.ListAnalysesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.ListConversationsResponse() + post.return_value = contact_center_insights.ListAnalysesResponse() - client.list_conversations( + client.list_analyses( request, metadata=[ ("key", "val"), @@ -19004,9 +22056,8 @@ def test_list_conversations_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_conversations_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.ListConversationsRequest, +def test_list_analyses_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.ListAnalysesRequest ): 
client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19014,7 +22065,9 @@ def test_list_conversations_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "parent": "projects/sample1/locations/sample2/conversations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19026,10 +22079,10 @@ def test_list_conversations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_conversations(request) + client.list_analyses(request) -def test_list_conversations_rest_flattened(): +def test_list_analyses_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19038,10 +22091,12 @@ def test_list_conversations_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.ListConversationsResponse() + return_value = contact_center_insights.ListAnalysesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/conversations/sample3" + } # get truthy value for each flattened field mock_args = dict( @@ -19053,27 +22108,25 @@ def test_list_conversations_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.ListConversationsResponse.pb( - return_value - ) + return_value = contact_center_insights.ListAnalysesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_conversations(**mock_args) + client.list_analyses(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/conversations" + "%s/v1/{parent=projects/*/locations/*/conversations/*}/analyses" % client.transport._host, args[1], ) -def test_list_conversations_rest_flattened_error(transport: str = "rest"): +def test_list_analyses_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19082,13 +22135,13 @@ def test_list_conversations_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_conversations( - contact_center_insights.ListConversationsRequest(), + client.list_analyses( + contact_center_insights.ListAnalysesRequest(), parent="parent_value", ) -def test_list_conversations_rest_pager(transport: str = "rest"): +def test_list_analyses_rest_pager(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19100,28 +22153,28 @@ def test_list_conversations_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - contact_center_insights.ListConversationsResponse( - conversations=[ - resources.Conversation(), - resources.Conversation(), - resources.Conversation(), + contact_center_insights.ListAnalysesResponse( + analyses=[ + resources.Analysis(), + resources.Analysis(), + resources.Analysis(), ], next_page_token="abc", ), - contact_center_insights.ListConversationsResponse( - conversations=[], + contact_center_insights.ListAnalysesResponse( + analyses=[], next_page_token="def", ), - contact_center_insights.ListConversationsResponse( - conversations=[ - resources.Conversation(), + contact_center_insights.ListAnalysesResponse( + analyses=[ + resources.Analysis(), ], next_page_token="ghi", ), - contact_center_insights.ListConversationsResponse( - conversations=[ - resources.Conversation(), - resources.Conversation(), + contact_center_insights.ListAnalysesResponse( + analyses=[ + resources.Analysis(), + resources.Analysis(), ], ), ) @@ -19130,8 +22183,7 @@ def test_list_conversations_rest_pager(transport: str = "rest"): # Wrap the values into proper Response objs response = tuple( - contact_center_insights.ListConversationsResponse.to_json(x) - for x in response + contact_center_insights.ListAnalysesResponse.to_json(x) for x in response ) return_values = tuple(Response() for i in response) for return_val, response_val in 
zip(return_values, response): @@ -19139,15 +22191,17 @@ def test_list_conversations_rest_pager(transport: str = "rest"): return_val.status_code = 200 req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/conversations/sample3" + } - pager = client.list_conversations(request=sample_request) + pager = client.list_analyses(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, resources.Conversation) for i in results) + assert all(isinstance(i, resources.Analysis) for i in results) - pages = list(client.list_conversations(request=sample_request).pages) + pages = list(client.list_analyses(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -19155,18 +22209,20 @@ def test_list_conversations_rest_pager(transport: str = "rest"): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeleteConversationRequest, + contact_center_insights.DeleteAnalysisRequest, dict, ], ) -def test_delete_conversation_rest(request_type): +def test_delete_analysis_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -19181,13 +22237,13 @@ def test_delete_conversation_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_conversation(request) + response = client.delete_analysis(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_conversation_rest_use_cached_wrapped_rpc(): +def test_delete_analysis_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19201,34 +22257,30 @@ def test_delete_conversation_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_conversation in client._transport._wrapped_methods - ) + assert client._transport.delete_analysis in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_conversation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_analysis] = mock_rpc request = {} - client.delete_conversation(request) + client.delete_analysis(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_conversation(request) + client.delete_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_conversation_rest_required_fields( - request_type=contact_center_insights.DeleteConversationRequest, +def test_delete_analysis_rest_required_fields( + request_type=contact_center_insights.DeleteAnalysisRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -19244,7 +22296,7 @@ def test_delete_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_conversation._get_unset_required_fields(jsonified_request) + ).delete_analysis._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -19253,9 +22305,7 @@ def test_delete_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_conversation._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("force",)) + ).delete_analysis._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -19293,24 +22343,24 @@ def test_delete_conversation_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_conversation(request) + response = client.delete_analysis(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_conversation_rest_unset_required_fields(): +def test_delete_analysis_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_conversation._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force",)) & set(("name",))) + unset_fields = transport.delete_analysis._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_conversation_rest_interceptors(null_interceptor): +def test_delete_analysis_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19323,11 +22373,11 @@ def test_delete_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_delete_conversation" + transports.ContactCenterInsightsRestInterceptor, "pre_delete_analysis" ) as pre: pre.assert_not_called() - pb_message = contact_center_insights.DeleteConversationRequest.pb( - contact_center_insights.DeleteConversationRequest() + pb_message = contact_center_insights.DeleteAnalysisRequest.pb( + 
contact_center_insights.DeleteAnalysisRequest() ) transcode.return_value = { "method": "post", @@ -19340,14 +22390,14 @@ def test_delete_conversation_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - request = contact_center_insights.DeleteConversationRequest() + request = contact_center_insights.DeleteAnalysisRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - client.delete_conversation( + client.delete_analysis( request, metadata=[ ("key", "val"), @@ -19358,9 +22408,8 @@ def test_delete_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() -def test_delete_conversation_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.DeleteConversationRequest, +def test_delete_analysis_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.DeleteAnalysisRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19368,7 +22417,9 @@ def test_delete_conversation_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -19380,10 +22431,10 @@ def test_delete_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_conversation(request) + client.delete_analysis(request) -def test_delete_conversation_rest_flattened(): +def test_delete_analysis_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19396,7 +22447,7 @@ def test_delete_conversation_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/conversations/sample3" + "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" } # get truthy value for each flattened field @@ -19412,20 +22463,20 @@ def test_delete_conversation_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_conversation(**mock_args) + client.delete_analysis(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/conversations/*}" + "%s/v1/{name=projects/*/locations/*/conversations/*/analyses/*}" % client.transport._host, args[1], ) -def test_delete_conversation_rest_flattened_error(transport: str = "rest"): +def test_delete_analysis_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19434,13 +22485,13 @@ def test_delete_conversation_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_conversation( - contact_center_insights.DeleteConversationRequest(), + client.delete_analysis( + contact_center_insights.DeleteAnalysisRequest(), name="name_value", ) -def test_delete_conversation_rest_error(): +def test_delete_analysis_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -19449,157 +22500,18 @@ def test_delete_conversation_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CreateAnalysisRequest, + contact_center_insights.BulkAnalyzeConversationsRequest, dict, ], ) -def test_create_analysis_rest(request_type): +def test_bulk_analyze_conversations_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } - request_init["analysis"] = { - "name": "name_value", - "request_time": {"seconds": 751, "nanos": 543}, - "create_time": {}, - "analysis_result": { - "call_analysis_metadata": { - "annotations": [ - { - "interruption_data": {}, - "sentiment_data": { - "magnitude": 0.9580000000000001, - "score": 0.54, - }, - "silence_data": {}, - "hold_data": {}, - "entity_mention_data": { - "entity_unique_id": "entity_unique_id_value", - "type_": 1, - "sentiment": {}, - }, - "intent_match_data": { - "intent_unique_id": "intent_unique_id_value" - }, - "phrase_match_data": { - "phrase_matcher": "phrase_matcher_value", - "display_name": "display_name_value", - }, - "issue_match_data": { - "issue_assignment": { - "issue": "issue_value", - "score": 0.54, - "display_name": "display_name_value", - } - }, - "channel_tag": 1140, - "annotation_start_boundary": { - "word_index": 1075, - "transcript_index": 1729, - }, - "annotation_end_boundary": {}, - } - ], - "entities": {}, - "sentiments": [{"channel_tag": 1140, 
"sentiment_data": {}}], - "intents": {}, - "phrase_matchers": {}, - "issue_model_result": { - "issue_model": "issue_model_value", - "issues": {}, - }, - }, - "end_time": {}, - }, - "annotator_selector": { - "run_interruption_annotator": True, - "run_silence_annotator": True, - "run_phrase_matcher_annotator": True, - "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], - "run_sentiment_annotator": True, - "run_entity_annotator": True, - "run_intent_annotator": True, - "run_issue_model_annotator": True, - "issue_models": ["issue_models_value1", "issue_models_value2"], - "run_summarization_annotator": True, - "summarization_config": { - "conversation_profile": "conversation_profile_value", - "summarization_model": 1, - }, - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.CreateAnalysisRequest.meta.fields["analysis"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["analysis"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["analysis"][field])): - del request_init["analysis"][field][i][subfield] - else: - del 
request_init["analysis"][field][subfield] + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -19614,13 +22526,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_analysis(request) + response = client.bulk_analyze_conversations(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_analysis_rest_use_cached_wrapped_rpc(): +def test_bulk_analyze_conversations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19634,17 +22546,22 @@ def test_create_analysis_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_analysis in client._transport._wrapped_methods + assert ( + client._transport.bulk_analyze_conversations + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_analysis] = mock_rpc + client._transport._wrapped_methods[ + client._transport.bulk_analyze_conversations + ] = mock_rpc request = {} - client.create_analysis(request) + client.bulk_analyze_conversations(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -19653,20 +22570,22 @@ def test_create_analysis_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_analysis(request) + client.bulk_analyze_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_analysis_rest_required_fields( - request_type=contact_center_insights.CreateAnalysisRequest, +def test_bulk_analyze_conversations_rest_required_fields( + request_type=contact_center_insights.BulkAnalyzeConversationsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} request_init["parent"] = "" + request_init["filter"] = "" + request_init["analysis_percentage"] = 0.0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19677,21 +22596,27 @@ def test_create_analysis_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_analysis._get_unset_required_fields(jsonified_request) + ).bulk_analyze_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" + jsonified_request["analysisPercentage"] = 0.20170000000000002 unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_analysis._get_unset_required_fields(jsonified_request) + ).bulk_analyze_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" + 
assert "analysisPercentage" in jsonified_request + assert jsonified_request["analysisPercentage"] == 0.20170000000000002 client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19725,32 +22650,33 @@ def test_create_analysis_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_analysis(request) + response = client.bulk_analyze_conversations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_analysis_rest_unset_required_fields(): +def test_bulk_analyze_conversations_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_analysis._get_unset_required_fields({}) + unset_fields = transport.bulk_analyze_conversations._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( ( "parent", - "analysis", + "filter", + "analysisPercentage", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_analysis_rest_interceptors(null_interceptor): +def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19765,14 +22691,16 @@ def test_create_analysis_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_create_analysis" + transports.ContactCenterInsightsRestInterceptor, + "post_bulk_analyze_conversations", ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_create_analysis" + transports.ContactCenterInsightsRestInterceptor, + "pre_bulk_analyze_conversations", ) as 
pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.CreateAnalysisRequest.pb( - contact_center_insights.CreateAnalysisRequest() + pb_message = contact_center_insights.BulkAnalyzeConversationsRequest.pb( + contact_center_insights.BulkAnalyzeConversationsRequest() ) transcode.return_value = { "method": "post", @@ -19788,7 +22716,7 @@ def test_create_analysis_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = contact_center_insights.CreateAnalysisRequest() + request = contact_center_insights.BulkAnalyzeConversationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -19796,7 +22724,7 @@ def test_create_analysis_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_analysis( + client.bulk_analyze_conversations( request, metadata=[ ("key", "val"), @@ -19808,8 +22736,9 @@ def test_create_analysis_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_analysis_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.CreateAnalysisRequest +def test_bulk_analyze_conversations_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.BulkAnalyzeConversationsRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19817,9 +22746,7 @@ def test_create_analysis_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -19831,10 +22758,10 @@ def test_create_analysis_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_analysis(request) + client.bulk_analyze_conversations(request) -def test_create_analysis_rest_flattened(): +def test_bulk_analyze_conversations_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19846,14 +22773,13 @@ def test_create_analysis_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", - analysis=resources.Analysis(name="name_value"), + filter="filter_value", + analysis_percentage=0.20170000000000002, ) mock_args.update(sample_request) @@ -19864,20 +22790,20 @@ def test_create_analysis_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_analysis(**mock_args) + client.bulk_analyze_conversations(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/conversations/*}/analyses" + "%s/v1/{parent=projects/*/locations/*}/conversations:bulkAnalyze" % client.transport._host, args[1], ) -def test_create_analysis_rest_flattened_error(transport: str = "rest"): +def test_bulk_analyze_conversations_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19886,14 +22812,15 @@ def test_create_analysis_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_analysis( - contact_center_insights.CreateAnalysisRequest(), + client.bulk_analyze_conversations( + contact_center_insights.BulkAnalyzeConversationsRequest(), parent="parent_value", - analysis=resources.Analysis(name="name_value"), + filter="filter_value", + analysis_percentage=0.20170000000000002, ) -def test_create_analysis_rest_error(): +def test_bulk_analyze_conversations_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -19902,46 +22829,39 @@ def test_create_analysis_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetAnalysisRequest, + contact_center_insights.BulkDeleteConversationsRequest, dict, ], ) -def test_get_analysis_rest(request_type): +def test_bulk_delete_conversations_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call 
within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Analysis( - name="name_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Analysis.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_analysis(request) + response = client.bulk_delete_conversations(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Analysis) - assert response.name == "name_value" + assert response.operation.name == "operations/spam" -def test_get_analysis_rest_use_cached_wrapped_rpc(): +def test_bulk_delete_conversations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19955,35 +22875,44 @@ def test_get_analysis_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_analysis in client._transport._wrapped_methods + assert ( + client._transport.bulk_delete_conversations + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_analysis] = mock_rpc + client._transport._wrapped_methods[ + client._transport.bulk_delete_conversations + ] = mock_rpc request = {} - client.get_analysis(request) + client.bulk_delete_conversations(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_analysis(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.bulk_delete_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_analysis_rest_required_fields( - request_type=contact_center_insights.GetAnalysisRequest, +def test_bulk_delete_conversations_rest_required_fields( + request_type=contact_center_insights.BulkDeleteConversationsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19994,21 +22923,21 @@ def test_get_analysis_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_analysis._get_unset_required_fields(jsonified_request) + ).bulk_delete_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_analysis._get_unset_required_fields(jsonified_request) + ).bulk_delete_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - 
assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20017,7 +22946,7 @@ def test_get_analysis_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Analysis() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20029,39 +22958,37 @@ def test_get_analysis_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Analysis.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_analysis(request) + response = client.bulk_delete_conversations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_analysis_rest_unset_required_fields(): +def test_bulk_delete_conversations_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_analysis._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = 
transport.bulk_delete_conversations._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_analysis_rest_interceptors(null_interceptor): +def test_bulk_delete_conversations_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20074,14 +23001,17 @@ def test_get_analysis_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_get_analysis" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_bulk_delete_conversations", ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_get_analysis" + transports.ContactCenterInsightsRestInterceptor, "pre_bulk_delete_conversations" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.GetAnalysisRequest.pb( - contact_center_insights.GetAnalysisRequest() + pb_message = contact_center_insights.BulkDeleteConversationsRequest.pb( + contact_center_insights.BulkDeleteConversationsRequest() ) transcode.return_value = { "method": "post", @@ -20093,17 +23023,19 @@ def test_get_analysis_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Analysis.to_json(resources.Analysis()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = contact_center_insights.GetAnalysisRequest() + request = contact_center_insights.BulkDeleteConversationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = 
resources.Analysis() + post.return_value = operations_pb2.Operation() - client.get_analysis( + client.bulk_delete_conversations( request, metadata=[ ("key", "val"), @@ -20115,8 +23047,9 @@ def test_get_analysis_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_analysis_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.GetAnalysisRequest +def test_bulk_delete_conversations_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.BulkDeleteConversationsRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20124,9 +23057,7 @@ def test_get_analysis_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20138,10 +23069,10 @@ def test_get_analysis_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_analysis(request) + client.bulk_delete_conversations(request) -def test_get_analysis_rest_flattened(): +def test_bulk_delete_conversations_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20150,42 +23081,39 @@ def test_get_analysis_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Analysis() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + filter="filter_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Analysis.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_analysis(**mock_args) + client.bulk_delete_conversations(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/conversations/*/analyses/*}" + "%s/v1/{parent=projects/*/locations/*}/conversations:bulkDelete" % client.transport._host, args[1], ) -def test_get_analysis_rest_flattened_error(transport: str = "rest"): +def test_bulk_delete_conversations_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20194,13 +23122,14 @@ def test_get_analysis_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_analysis( - contact_center_insights.GetAnalysisRequest(), - name="name_value", + client.bulk_delete_conversations( + contact_center_insights.BulkDeleteConversationsRequest(), + parent="parent_value", + filter="filter_value", ) -def test_get_analysis_rest_error(): +def test_bulk_delete_conversations_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20209,46 +23138,39 @@ def test_get_analysis_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListAnalysesRequest, + contact_center_insights.IngestConversationsRequest, dict, ], ) -def test_list_analyses_rest(request_type): +def test_ingest_conversations_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.ListAnalysesResponse( - next_page_token="next_page_token_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = contact_center_insights.ListAnalysesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_analyses(request) + response = client.ingest_conversations(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAnalysesPager) - assert response.next_page_token == "next_page_token_value" + assert response.operation.name == "operations/spam" -def test_list_analyses_rest_use_cached_wrapped_rpc(): +def test_ingest_conversations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20262,30 +23184,38 @@ def test_list_analyses_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_analyses in client._transport._wrapped_methods + assert ( + client._transport.ingest_conversations in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_analyses] = mock_rpc + client._transport._wrapped_methods[ + client._transport.ingest_conversations + ] = mock_rpc request = {} - client.list_analyses(request) + client.ingest_conversations(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_analyses(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.ingest_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_analyses_rest_required_fields( - request_type=contact_center_insights.ListAnalysesRequest, +def test_ingest_conversations_rest_required_fields( + request_type=contact_center_insights.IngestConversationsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -20301,7 +23231,7 @@ def test_list_analyses_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_analyses._get_unset_required_fields(jsonified_request) + ).ingest_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -20310,15 +23240,7 @@ def test_list_analyses_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_analyses._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + ).ingest_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -20332,7 +23254,7 @@ def test_list_analyses_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListAnalysesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20344,48 +23266,37 @@ def test_list_analyses_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = contact_center_insights.ListAnalysesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_analyses(request) + response = client.ingest_conversations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_analyses_rest_unset_required_fields(): +def test_ingest_conversations_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_analyses._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.ingest_conversations._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_analyses_rest_interceptors(null_interceptor): +def test_ingest_conversations_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20398,14 +23309,16 @@ def test_list_analyses_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, 
"transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_list_analyses" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_ingest_conversations" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_list_analyses" + transports.ContactCenterInsightsRestInterceptor, "pre_ingest_conversations" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.ListAnalysesRequest.pb( - contact_center_insights.ListAnalysesRequest() + pb_message = contact_center_insights.IngestConversationsRequest.pb( + contact_center_insights.IngestConversationsRequest() ) transcode.return_value = { "method": "post", @@ -20417,21 +23330,19 @@ def test_list_analyses_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - contact_center_insights.ListAnalysesResponse.to_json( - contact_center_insights.ListAnalysesResponse() - ) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = contact_center_insights.ListAnalysesRequest() + request = contact_center_insights.IngestConversationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.ListAnalysesResponse() + post.return_value = operations_pb2.Operation() - client.list_analyses( + client.ingest_conversations( request, metadata=[ ("key", "val"), @@ -20443,8 +23354,9 @@ def test_list_analyses_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_analyses_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.ListAnalysesRequest +def test_ingest_conversations_rest_bad_request( + transport: str = "rest", + 
request_type=contact_center_insights.IngestConversationsRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20452,9 +23364,7 @@ def test_list_analyses_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20466,10 +23376,10 @@ def test_list_analyses_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_analyses(request) + client.ingest_conversations(request) -def test_list_analyses_rest_flattened(): +def test_ingest_conversations_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20478,12 +23388,10 @@ def test_list_analyses_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.ListAnalysesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -20494,26 +23402,24 @@ def test_list_analyses_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = contact_center_insights.ListAnalysesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_analyses(**mock_args) + client.ingest_conversations(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/conversations/*}/analyses" + "%s/v1/{parent=projects/*/locations/*}/conversations:ingest" % client.transport._host, args[1], ) -def test_list_analyses_rest_flattened_error(transport: str = "rest"): +def test_ingest_conversations_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20522,115 +23428,54 @@ def test_list_analyses_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_analyses( - contact_center_insights.ListAnalysesRequest(), + client.ingest_conversations( + contact_center_insights.IngestConversationsRequest(), parent="parent_value", ) -def test_list_analyses_rest_pager(transport: str = "rest"): +def test_ingest_conversations_rest_error(): client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - contact_center_insights.ListAnalysesResponse( - analyses=[ - resources.Analysis(), - resources.Analysis(), - resources.Analysis(), - ], - next_page_token="abc", - ), - contact_center_insights.ListAnalysesResponse( - analyses=[], - next_page_token="def", - ), - contact_center_insights.ListAnalysesResponse( - analyses=[ - resources.Analysis(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListAnalysesResponse( - analyses=[ - resources.Analysis(), - resources.Analysis(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - contact_center_insights.ListAnalysesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } - - pager = client.list_analyses(request=sample_request) - - results = list(pager) - assert len(results) == 6 - 
assert all(isinstance(i, resources.Analysis) for i in results) - - pages = list(client.list_analyses(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeleteAnalysisRequest, + contact_center_insights.ExportInsightsDataRequest, dict, ], ) -def test_delete_analysis_rest(request_type): +def test_export_insights_data_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_analysis(request) + response = client.export_insights_data(request) # Establish that the response is the type that we expect. 
- assert response is None + assert response.operation.name == "operations/spam" -def test_delete_analysis_rest_use_cached_wrapped_rpc(): +def test_export_insights_data_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20644,35 +23489,43 @@ def test_delete_analysis_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_analysis in client._transport._wrapped_methods + assert ( + client._transport.export_insights_data in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_analysis] = mock_rpc + client._transport._wrapped_methods[ + client._transport.export_insights_data + ] = mock_rpc request = {} - client.delete_analysis(request) + client.export_insights_data(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_analysis(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_insights_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_analysis_rest_required_fields( - request_type=contact_center_insights.DeleteAnalysisRequest, +def test_export_insights_data_rest_required_fields( + request_type=contact_center_insights.ExportInsightsDataRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20683,21 +23536,21 @@ def test_delete_analysis_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_analysis._get_unset_required_fields(jsonified_request) + ).export_insights_data._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_analysis._get_unset_required_fields(jsonified_request) + ).export_insights_data._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20706,7 +23559,7 @@ def 
test_delete_analysis_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20718,36 +23571,37 @@ def test_delete_analysis_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_analysis(request) + response = client.export_insights_data(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_analysis_rest_unset_required_fields(): +def test_export_insights_data_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_analysis._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.export_insights_data._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_analysis_rest_interceptors(null_interceptor): +def test_export_insights_data_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), 
interceptor=None @@ -20760,11 +23614,16 @@ def test_delete_analysis_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_delete_analysis" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_export_insights_data" + ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "pre_export_insights_data" ) as pre: pre.assert_not_called() - pb_message = contact_center_insights.DeleteAnalysisRequest.pb( - contact_center_insights.DeleteAnalysisRequest() + post.assert_not_called() + pb_message = contact_center_insights.ExportInsightsDataRequest.pb( + contact_center_insights.ExportInsightsDataRequest() ) transcode.return_value = { "method": "post", @@ -20776,15 +23635,19 @@ def test_delete_analysis_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = contact_center_insights.DeleteAnalysisRequest() + request = contact_center_insights.ExportInsightsDataRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - client.delete_analysis( + client.export_insights_data( request, metadata=[ ("key", "val"), @@ -20793,10 +23656,12 @@ def test_delete_analysis_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_analysis_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.DeleteAnalysisRequest +def test_export_insights_data_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.ExportInsightsDataRequest, ): client = ContactCenterInsightsClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -20804,9 +23669,7 @@ def test_delete_analysis_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20818,10 +23681,10 @@ def test_delete_analysis_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_analysis(request) + client.export_insights_data(request) -def test_delete_analysis_rest_flattened(): +def test_export_insights_data_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20830,40 +23693,38 @@ def test_delete_analysis_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_analysis(**mock_args) + client.export_insights_data(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/conversations/*/analyses/*}" + "%s/v1/{parent=projects/*/locations/*}/insightsdata:export" % client.transport._host, args[1], ) -def test_delete_analysis_rest_flattened_error(transport: str = "rest"): +def test_export_insights_data_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20872,13 +23733,13 @@ def test_delete_analysis_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_analysis( - contact_center_insights.DeleteAnalysisRequest(), - name="name_value", + client.export_insights_data( + contact_center_insights.ExportInsightsDataRequest(), + parent="parent_value", ) -def test_delete_analysis_rest_error(): +def test_export_insights_data_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20887,11 +23748,11 @@ def test_delete_analysis_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.BulkAnalyzeConversationsRequest, + contact_center_insights.CreateIssueModelRequest, dict, ], ) -def test_bulk_analyze_conversations_rest(request_type): +def test_create_issue_model_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20899,6 +23760,95 @@ def test_bulk_analyze_conversations_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["issue_model"] = { + "name": "name_value", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "issue_count": 1201, + "state": 1, + "input_data_config": { + "medium": 1, + "training_conversations_count": 3025, + "filter": "filter_value", + }, + "training_stats": { + "analyzed_conversations_count": 3021, + "unclassified_conversations_count": 3439, + "issue_stats": {}, + }, + "model_type": 1, + "language_code": "language_code_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.CreateIssueModelRequest.meta.fields[ + "issue_model" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["issue_model"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime 
version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["issue_model"][field])): + del request_init["issue_model"][field][i][subfield] + else: + del request_init["issue_model"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -20913,13 +23863,13 @@ def test_bulk_analyze_conversations_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.bulk_analyze_conversations(request) + response = client.create_issue_model(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_bulk_analyze_conversations_rest_use_cached_wrapped_rpc(): +def test_create_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20934,8 +23884,7 @@ def test_bulk_analyze_conversations_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.bulk_analyze_conversations - in client._transport._wrapped_methods + client._transport.create_issue_model in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -20944,11 +23893,11 @@ def test_bulk_analyze_conversations_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.bulk_analyze_conversations + client._transport.create_issue_model ] = mock_rpc request = {} - client.bulk_analyze_conversations(request) + client.create_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -20957,22 +23906,20 @@ def test_bulk_analyze_conversations_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.bulk_analyze_conversations(request) + client.create_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_bulk_analyze_conversations_rest_required_fields( - request_type=contact_center_insights.BulkAnalyzeConversationsRequest, +def test_create_issue_model_rest_required_fields( + request_type=contact_center_insights.CreateIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} request_init["parent"] = "" - request_init["filter"] = "" - request_init["analysis_percentage"] = 0.0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20983,27 +23930,21 @@ def test_bulk_analyze_conversations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).bulk_analyze_conversations._get_unset_required_fields(jsonified_request) + ).create_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" - jsonified_request["filter"] = "filter_value" - jsonified_request["analysisPercentage"] = 0.20170000000000002 unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).bulk_analyze_conversations._get_unset_required_fields(jsonified_request) + 
).create_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "filter" in jsonified_request - assert jsonified_request["filter"] == "filter_value" - assert "analysisPercentage" in jsonified_request - assert jsonified_request["analysisPercentage"] == 0.20170000000000002 client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21037,33 +23978,32 @@ def test_bulk_analyze_conversations_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.bulk_analyze_conversations(request) + response = client.create_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_bulk_analyze_conversations_rest_unset_required_fields(): +def test_create_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.bulk_analyze_conversations._get_unset_required_fields({}) + unset_fields = transport.create_issue_model._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( ( "parent", - "filter", - "analysisPercentage", + "issueModel", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): +def test_create_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21078,16 +24018,14 @@ def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, 
"_set_result_from_operation" ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, - "post_bulk_analyze_conversations", + transports.ContactCenterInsightsRestInterceptor, "post_create_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, - "pre_bulk_analyze_conversations", + transports.ContactCenterInsightsRestInterceptor, "pre_create_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.BulkAnalyzeConversationsRequest.pb( - contact_center_insights.BulkAnalyzeConversationsRequest() + pb_message = contact_center_insights.CreateIssueModelRequest.pb( + contact_center_insights.CreateIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -21103,7 +24041,7 @@ def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = contact_center_insights.BulkAnalyzeConversationsRequest() + request = contact_center_insights.CreateIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -21111,7 +24049,7 @@ def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.bulk_analyze_conversations( + client.create_issue_model( request, metadata=[ ("key", "val"), @@ -21123,9 +24061,9 @@ def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): post.assert_called_once() -def test_bulk_analyze_conversations_rest_bad_request( +def test_create_issue_model_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.BulkAnalyzeConversationsRequest, + request_type=contact_center_insights.CreateIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21145,10 +24083,10 @@ def test_bulk_analyze_conversations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() 
req.return_value = response_value - client.bulk_analyze_conversations(request) + client.create_issue_model(request) -def test_bulk_analyze_conversations_rest_flattened(): +def test_create_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21165,8 +24103,7 @@ def test_bulk_analyze_conversations_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - filter="filter_value", - analysis_percentage=0.20170000000000002, + issue_model=resources.IssueModel(name="name_value"), ) mock_args.update(sample_request) @@ -21177,78 +24114,185 @@ def test_bulk_analyze_conversations_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.bulk_analyze_conversations(**mock_args) + client.create_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/conversations:bulkAnalyze" + "%s/v1/{parent=projects/*/locations/*}/issueModels" % client.transport._host, args[1], ) -def test_bulk_analyze_conversations_rest_flattened_error(transport: str = "rest"): +def test_create_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.bulk_analyze_conversations( - contact_center_insights.BulkAnalyzeConversationsRequest(), - parent="parent_value", - filter="filter_value", - analysis_percentage=0.20170000000000002, - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_issue_model( + contact_center_insights.CreateIssueModelRequest(), + parent="parent_value", + issue_model=resources.IssueModel(name="name_value"), + ) + + +def test_create_issue_model_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.UpdateIssueModelRequest, + dict, + ], +) +def test_update_issue_model_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "issue_model": { + "name": "projects/sample1/locations/sample2/issueModels/sample3" + } + } + request_init["issue_model"] = { + "name": "projects/sample1/locations/sample2/issueModels/sample3", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "issue_count": 1201, + "state": 1, + "input_data_config": { + "medium": 1, + "training_conversations_count": 3025, + "filter": "filter_value", + }, + "training_stats": { + "analyzed_conversations_count": 3021, + "unclassified_conversations_count": 3439, + "issue_stats": {}, + }, + "model_type": 1, + "language_code": "language_code_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.UpdateIssueModelRequest.meta.fields[ + "issue_model" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields -def test_bulk_analyze_conversations_rest_error(): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + subfields_not_in_runtime = [] -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.BulkDeleteConversationsRequest, - dict, - ], -) -def test_bulk_delete_conversations_rest(request_type): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["issue_model"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample 
request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["issue_model"][field])): + del request_init["issue_model"][field][i][subfield] + else: + del request_init["issue_model"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.IssueModel( + name="name_value", + display_name="display_name_value", + issue_count=1201, + state=resources.IssueModel.State.UNDEPLOYED, + model_type=resources.IssueModel.ModelType.TYPE_V1, + language_code="language_code_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.bulk_delete_conversations(request) + response = client.update_issue_model(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, resources.IssueModel) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.issue_count == 1201 + assert response.state == resources.IssueModel.State.UNDEPLOYED + assert response.model_type == resources.IssueModel.ModelType.TYPE_V1 + assert response.language_code == "language_code_value" -def test_bulk_delete_conversations_rest_use_cached_wrapped_rpc(): +def test_update_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21263,8 +24307,7 @@ def test_bulk_delete_conversations_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.bulk_delete_conversations - in client._transport._wrapped_methods + client._transport.update_issue_model in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -21273,33 +24316,28 @@ def test_bulk_delete_conversations_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.bulk_delete_conversations + client._transport.update_issue_model ] = mock_rpc request = {} - client.bulk_delete_conversations(request) + client.update_issue_model(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.bulk_delete_conversations(request) + client.update_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_bulk_delete_conversations_rest_required_fields( - request_type=contact_center_insights.BulkDeleteConversationsRequest, +def test_update_issue_model_rest_required_fields( + request_type=contact_center_insights.UpdateIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21310,21 +24348,19 @@ def test_bulk_delete_conversations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).bulk_delete_conversations._get_unset_required_fields(jsonified_request) + ).update_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).bulk_delete_conversations._get_unset_required_fields(jsonified_request) + ).update_issue_model._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21333,7 +24369,7 @@ def test_bulk_delete_conversations_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.IssueModel() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21345,7 +24381,7 @@ def test_bulk_delete_conversations_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -21353,29 +24389,32 @@ def test_bulk_delete_conversations_rest_required_fields( response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.bulk_delete_conversations(request) + response = client.update_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_bulk_delete_conversations_rest_unset_required_fields(): +def test_update_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.bulk_delete_conversations._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.update_issue_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("issueModel",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_bulk_delete_conversations_rest_interceptors(null_interceptor): +def test_update_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21388,17 +24427,14 @@ def test_bulk_delete_conversations_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, - "post_bulk_delete_conversations", + transports.ContactCenterInsightsRestInterceptor, "post_update_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_bulk_delete_conversations" + transports.ContactCenterInsightsRestInterceptor, "pre_update_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.BulkDeleteConversationsRequest.pb( - contact_center_insights.BulkDeleteConversationsRequest() + pb_message = contact_center_insights.UpdateIssueModelRequest.pb( + contact_center_insights.UpdateIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -21410,19 +24446,17 @@ def test_bulk_delete_conversations_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = resources.IssueModel.to_json(resources.IssueModel()) - request = 
contact_center_insights.BulkDeleteConversationsRequest() + request = contact_center_insights.UpdateIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = resources.IssueModel() - client.bulk_delete_conversations( + client.update_issue_model( request, metadata=[ ("key", "val"), @@ -21434,9 +24468,9 @@ def test_bulk_delete_conversations_rest_interceptors(null_interceptor): post.assert_called_once() -def test_bulk_delete_conversations_rest_bad_request( +def test_update_issue_model_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.BulkDeleteConversationsRequest, + request_type=contact_center_insights.UpdateIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21444,7 +24478,11 @@ def test_bulk_delete_conversations_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "issue_model": { + "name": "projects/sample1/locations/sample2/issueModels/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21456,10 +24494,10 @@ def test_bulk_delete_conversations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.bulk_delete_conversations(request) + client.update_issue_model(request) -def test_bulk_delete_conversations_rest_flattened(): +def test_update_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21468,39 +24506,45 @@ def test_bulk_delete_conversations_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.IssueModel() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "issue_model": { + "name": "projects/sample1/locations/sample2/issueModels/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - filter="filter_value", + issue_model=resources.IssueModel(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.bulk_delete_conversations(**mock_args) + client.update_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/conversations:bulkDelete" + "%s/v1/{issue_model.name=projects/*/locations/*/issueModels/*}" % client.transport._host, args[1], ) -def test_bulk_delete_conversations_rest_flattened_error(transport: str = "rest"): +def test_update_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21509,14 +24553,14 @@ def test_bulk_delete_conversations_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.bulk_delete_conversations( - contact_center_insights.BulkDeleteConversationsRequest(), - parent="parent_value", - filter="filter_value", + client.update_issue_model( + contact_center_insights.UpdateIssueModelRequest(), + issue_model=resources.IssueModel(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_bulk_delete_conversations_rest_error(): +def test_update_issue_model_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21525,39 +24569,54 @@ def test_bulk_delete_conversations_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.IngestConversationsRequest, + contact_center_insights.GetIssueModelRequest, dict, ], ) -def test_ingest_conversations_rest(request_type): +def test_get_issue_model_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the 
http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.IssueModel( + name="name_value", + display_name="display_name_value", + issue_count=1201, + state=resources.IssueModel.State.UNDEPLOYED, + model_type=resources.IssueModel.ModelType.TYPE_V1, + language_code="language_code_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.ingest_conversations(request) + response = client.get_issue_model(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, resources.IssueModel) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.issue_count == 1201 + assert response.state == resources.IssueModel.State.UNDEPLOYED + assert response.model_type == resources.IssueModel.ModelType.TYPE_V1 + assert response.language_code == "language_code_value" -def test_ingest_conversations_rest_use_cached_wrapped_rpc(): +def test_get_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21571,43 +24630,35 @@ def test_ingest_conversations_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.ingest_conversations in client._transport._wrapped_methods - ) + assert client._transport.get_issue_model in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.ingest_conversations - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_issue_model] = mock_rpc request = {} - client.ingest_conversations(request) + client.get_issue_model(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.ingest_conversations(request) + client.get_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_ingest_conversations_rest_required_fields( - request_type=contact_center_insights.IngestConversationsRequest, +def test_get_issue_model_rest_required_fields( + request_type=contact_center_insights.GetIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21618,21 +24669,21 @@ def test_ingest_conversations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).ingest_conversations._get_unset_required_fields(jsonified_request) + ).get_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).ingest_conversations._get_unset_required_fields(jsonified_request) + ).get_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21641,7 +24692,7 @@ def 
test_ingest_conversations_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.IssueModel() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21653,37 +24704,39 @@ def test_ingest_conversations_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.ingest_conversations(request) + response = client.get_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_ingest_conversations_rest_unset_required_fields(): +def test_get_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.ingest_conversations._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.get_issue_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_ingest_conversations_rest_interceptors(null_interceptor): +def test_get_issue_model_rest_interceptors(null_interceptor): transport = 
transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21696,16 +24749,14 @@ def test_ingest_conversations_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_ingest_conversations" + transports.ContactCenterInsightsRestInterceptor, "post_get_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_ingest_conversations" + transports.ContactCenterInsightsRestInterceptor, "pre_get_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.IngestConversationsRequest.pb( - contact_center_insights.IngestConversationsRequest() + pb_message = contact_center_insights.GetIssueModelRequest.pb( + contact_center_insights.GetIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -21717,19 +24768,17 @@ def test_ingest_conversations_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = resources.IssueModel.to_json(resources.IssueModel()) - request = contact_center_insights.IngestConversationsRequest() + request = contact_center_insights.GetIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = resources.IssueModel() - client.ingest_conversations( + client.get_issue_model( request, metadata=[ ("key", "val"), @@ -21741,9 +24790,8 @@ def test_ingest_conversations_rest_interceptors(null_interceptor): post.assert_called_once() -def test_ingest_conversations_rest_bad_request( 
- transport: str = "rest", - request_type=contact_center_insights.IngestConversationsRequest, +def test_get_issue_model_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.GetIssueModelRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21751,7 +24799,7 @@ def test_ingest_conversations_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21763,10 +24811,10 @@ def test_ingest_conversations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.ingest_conversations(request) + client.get_issue_model(request) -def test_ingest_conversations_rest_flattened(): +def test_get_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21775,38 +24823,42 @@ def test_ingest_conversations_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.IssueModel() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/issueModels/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.ingest_conversations(**mock_args) + client.get_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/conversations:ingest" + "%s/v1/{name=projects/*/locations/*/issueModels/*}" % client.transport._host, args[1], ) -def test_ingest_conversations_rest_flattened_error(transport: str = "rest"): +def test_get_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21815,13 +24867,13 @@ def test_ingest_conversations_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.ingest_conversations( - contact_center_insights.IngestConversationsRequest(), - parent="parent_value", + client.get_issue_model( + contact_center_insights.GetIssueModelRequest(), + name="name_value", ) -def test_ingest_conversations_rest_error(): +def test_get_issue_model_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21830,11 +24882,11 @@ def test_ingest_conversations_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ExportInsightsDataRequest, + contact_center_insights.ListIssueModelsRequest, dict, ], ) -def test_export_insights_data_rest(request_type): +def test_list_issue_models_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21847,22 +24899,24 @@ def test_export_insights_data_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = contact_center_insights.ListIssueModelsResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = contact_center_insights.ListIssueModelsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_insights_data(request) + response = client.list_issue_models(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, contact_center_insights.ListIssueModelsResponse) -def test_export_insights_data_rest_use_cached_wrapped_rpc(): +def test_list_issue_models_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21876,9 +24930,7 @@ def test_export_insights_data_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.export_insights_data in client._transport._wrapped_methods - ) + assert client._transport.list_issue_models in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -21886,28 +24938,24 @@ def test_export_insights_data_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.export_insights_data + client._transport.list_issue_models ] = mock_rpc request = {} - client.export_insights_data(request) + client.list_issue_models(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.export_insights_data(request) + client.list_issue_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_export_insights_data_rest_required_fields( - request_type=contact_center_insights.ExportInsightsDataRequest, +def test_list_issue_models_rest_required_fields( + request_type=contact_center_insights.ListIssueModelsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -21923,7 +24971,7 @@ def test_export_insights_data_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_insights_data._get_unset_required_fields(jsonified_request) + ).list_issue_models._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -21932,7 +24980,7 @@ def test_export_insights_data_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_insights_data._get_unset_required_fields(jsonified_request) + ).list_issue_models._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -21946,7 +24994,7 @@ def test_export_insights_data_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = contact_center_insights.ListIssueModelsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21958,37 +25006,41 @@ def test_export_insights_data_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = contact_center_insights.ListIssueModelsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_insights_data(request) + response = client.list_issue_models(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_export_insights_data_rest_unset_required_fields(): +def test_list_issue_models_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.export_insights_data._get_unset_required_fields({}) + unset_fields = transport.list_issue_models._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_insights_data_rest_interceptors(null_interceptor): +def test_list_issue_models_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22001,16 +25053,14 @@ def test_export_insights_data_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, 
"_set_result_from_operation" - ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_export_insights_data" + transports.ContactCenterInsightsRestInterceptor, "post_list_issue_models" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_export_insights_data" + transports.ContactCenterInsightsRestInterceptor, "pre_list_issue_models" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.ExportInsightsDataRequest.pb( - contact_center_insights.ExportInsightsDataRequest() + pb_message = contact_center_insights.ListIssueModelsRequest.pb( + contact_center_insights.ListIssueModelsRequest() ) transcode.return_value = { "method": "post", @@ -22022,19 +25072,21 @@ def test_export_insights_data_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + contact_center_insights.ListIssueModelsResponse.to_json( + contact_center_insights.ListIssueModelsResponse() + ) ) - request = contact_center_insights.ExportInsightsDataRequest() + request = contact_center_insights.ListIssueModelsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = contact_center_insights.ListIssueModelsResponse() - client.export_insights_data( + client.list_issue_models( request, metadata=[ ("key", "val"), @@ -22046,9 +25098,8 @@ def test_export_insights_data_rest_interceptors(null_interceptor): post.assert_called_once() -def test_export_insights_data_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.ExportInsightsDataRequest, +def test_list_issue_models_rest_bad_request( + transport: str = "rest", 
request_type=contact_center_insights.ListIssueModelsRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22068,10 +25119,10 @@ def test_export_insights_data_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.export_insights_data(request) + client.list_issue_models(request) -def test_export_insights_data_rest_flattened(): +def test_list_issue_models_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22080,7 +25131,7 @@ def test_export_insights_data_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = contact_center_insights.ListIssueModelsResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -22094,148 +25145,61 @@ def test_export_insights_data_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = contact_center_insights.ListIssueModelsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.export_insights_data(**mock_args) + client.list_issue_models(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/insightsdata:export" + "%s/v1/{parent=projects/*/locations/*}/issueModels" % client.transport._host, args[1], ) -def test_export_insights_data_rest_flattened_error(transport: str = "rest"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.export_insights_data( - contact_center_insights.ExportInsightsDataRequest(), - parent="parent_value", - ) - - -def test_export_insights_data_rest_error(): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.CreateIssueModelRequest, - dict, - ], -) -def test_create_issue_model_rest(request_type): +def test_list_issue_models_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["issue_model"] = { - "name": "name_value", - "display_name": "display_name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "issue_count": 1201, - "state": 1, - "input_data_config": { - "medium": 1, - "training_conversations_count": 3025, - "filter": "filter_value", - }, - "training_stats": { - "analyzed_conversations_count": 3021, - "unclassified_conversations_count": 3439, - "issue_stats": {}, - }, - "model_type": 1, - "language_code": "language_code_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.CreateIssueModelRequest.meta.fields[ - "issue_model" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_issue_models( + contact_center_insights.ListIssueModelsRequest(), + parent="parent_value", + ) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - subfields_not_in_runtime = [] +def test_list_issue_models_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["issue_model"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.DeleteIssueModelRequest, + dict, + ], +) +def test_delete_issue_model_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if 
field_repeated: - for i in range(0, len(request_init["issue_model"][field])): - del request_init["issue_model"][field][i][subfield] - else: - del request_init["issue_model"][field][subfield] + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -22250,13 +25214,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_issue_model(request) + response = client.delete_issue_model(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_issue_model_rest_use_cached_wrapped_rpc(): +def test_delete_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22271,7 +25235,7 @@ def test_create_issue_model_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_issue_model in client._transport._wrapped_methods + client._transport.delete_issue_model in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -22280,11 +25244,11 @@ def test_create_issue_model_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_issue_model + client._transport.delete_issue_model ] = mock_rpc request = {} - client.create_issue_model(request) + client.delete_issue_model(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -22293,20 +25257,20 @@ def test_create_issue_model_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_issue_model(request) + client.delete_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_issue_model_rest_required_fields( - request_type=contact_center_insights.CreateIssueModelRequest, +def test_delete_issue_model_rest_required_fields( + request_type=contact_center_insights.DeleteIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22317,21 +25281,21 @@ def test_create_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_issue_model._get_unset_required_fields(jsonified_request) + ).delete_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_issue_model._get_unset_required_fields(jsonified_request) + ).delete_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22352,10 +25316,9 @@ def 
test_create_issue_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -22365,32 +25328,24 @@ def test_create_issue_model_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_issue_model(request) + response = client.delete_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_issue_model_rest_unset_required_fields(): +def test_delete_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_issue_model._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "issueModel", - ) - ) - ) + unset_fields = transport.delete_issue_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_issue_model_rest_interceptors(null_interceptor): +def test_delete_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22405,14 +25360,14 @@ def test_create_issue_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_create_issue_model" + transports.ContactCenterInsightsRestInterceptor, "post_delete_issue_model" ) as post, mock.patch.object( - 
transports.ContactCenterInsightsRestInterceptor, "pre_create_issue_model" + transports.ContactCenterInsightsRestInterceptor, "pre_delete_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.CreateIssueModelRequest.pb( - contact_center_insights.CreateIssueModelRequest() + pb_message = contact_center_insights.DeleteIssueModelRequest.pb( + contact_center_insights.DeleteIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -22428,7 +25383,7 @@ def test_create_issue_model_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = contact_center_insights.CreateIssueModelRequest() + request = contact_center_insights.DeleteIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22436,7 +25391,7 @@ def test_create_issue_model_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_issue_model( + client.delete_issue_model( request, metadata=[ ("key", "val"), @@ -22448,9 +25403,9 @@ def test_create_issue_model_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_issue_model_rest_bad_request( +def test_delete_issue_model_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.CreateIssueModelRequest, + request_type=contact_center_insights.DeleteIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22458,7 +25413,7 @@ def test_create_issue_model_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -22470,10 +25425,10 @@ def test_create_issue_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_issue_model(request) + client.delete_issue_model(request) -def test_create_issue_model_rest_flattened(): +def test_delete_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22485,12 +25440,13 @@ def test_create_issue_model_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/issueModels/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - issue_model=resources.IssueModel(name="name_value"), + name="name_value", ) mock_args.update(sample_request) @@ -22501,20 +25457,20 @@ def test_create_issue_model_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_issue_model(**mock_args) + client.delete_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/issueModels" + "%s/v1/{name=projects/*/locations/*/issueModels/*}" % client.transport._host, args[1], ) -def test_create_issue_model_rest_flattened_error(transport: str = "rest"): +def test_delete_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22523,163 +25479,54 @@ def test_create_issue_model_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_issue_model( - contact_center_insights.CreateIssueModelRequest(), - parent="parent_value", - issue_model=resources.IssueModel(name="name_value"), + client.delete_issue_model( + contact_center_insights.DeleteIssueModelRequest(), + name="name_value", ) -def test_create_issue_model_rest_error(): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.UpdateIssueModelRequest, - dict, - ], -) -def test_update_issue_model_rest(request_type): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "issue_model": { - "name": "projects/sample1/locations/sample2/issueModels/sample3" - } - } - request_init["issue_model"] = { - "name": "projects/sample1/locations/sample2/issueModels/sample3", - "display_name": "display_name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "issue_count": 1201, - "state": 1, - "input_data_config": { - "medium": 1, - "training_conversations_count": 3025, - "filter": "filter_value", - }, - "training_stats": { - 
"analyzed_conversations_count": 3021, - "unclassified_conversations_count": 3439, - "issue_stats": {}, - }, - "model_type": 1, - "language_code": "language_code_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.UpdateIssueModelRequest.meta.fields[ - "issue_model" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["issue_model"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for 
subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +def test_delete_issue_model_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["issue_model"][field])): - del request_init["issue_model"][field][i][subfield] - else: - del request_init["issue_model"][field][subfield] + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.DeployIssueModelRequest, + dict, + ], +) +def test_deploy_issue_model_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.IssueModel( - name="name_value", - display_name="display_name_value", - issue_count=1201, - state=resources.IssueModel.State.UNDEPLOYED, - model_type=resources.IssueModel.ModelType.TYPE_V1, - language_code="language_code_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_issue_model(request) + response = client.deploy_issue_model(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.IssueModel) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.issue_count == 1201 - assert response.state == resources.IssueModel.State.UNDEPLOYED - assert response.model_type == resources.IssueModel.ModelType.TYPE_V1 - assert response.language_code == "language_code_value" + assert response.operation.name == "operations/spam" -def test_update_issue_model_rest_use_cached_wrapped_rpc(): +def test_deploy_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22694,7 +25541,7 @@ def test_update_issue_model_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_issue_model in client._transport._wrapped_methods + client._transport.deploy_issue_model in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -22703,28 +25550,33 @@ def test_update_issue_model_rest_use_cached_wrapped_rpc(): "foo" 
# operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_issue_model + client._transport.deploy_issue_model ] = mock_rpc request = {} - client.update_issue_model(request) + client.deploy_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_issue_model(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.deploy_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_issue_model_rest_required_fields( - request_type=contact_center_insights.UpdateIssueModelRequest, +def test_deploy_issue_model_rest_required_fields( + request_type=contact_center_insights.DeployIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22735,19 +25587,21 @@ def test_update_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_issue_model._get_unset_required_fields(jsonified_request) + ).deploy_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_issue_model._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).deploy_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22756,7 +25610,7 @@ def test_update_issue_model_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.IssueModel() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -22768,7 +25622,7 @@ def test_update_issue_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -22776,32 +25630,29 @@ def test_update_issue_model_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_issue_model(request) + response = client.deploy_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_issue_model_rest_unset_required_fields(): +def test_deploy_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - 
unset_fields = transport.update_issue_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("issueModel",))) + unset_fields = transport.deploy_issue_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_issue_model_rest_interceptors(null_interceptor): +def test_deploy_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22814,14 +25665,16 @@ def test_update_issue_model_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_update_issue_model" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_deploy_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_update_issue_model" + transports.ContactCenterInsightsRestInterceptor, "pre_deploy_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.UpdateIssueModelRequest.pb( - contact_center_insights.UpdateIssueModelRequest() + pb_message = contact_center_insights.DeployIssueModelRequest.pb( + contact_center_insights.DeployIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -22833,17 +25686,19 @@ def test_update_issue_model_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.IssueModel.to_json(resources.IssueModel()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = contact_center_insights.UpdateIssueModelRequest() + request = 
contact_center_insights.DeployIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.IssueModel() + post.return_value = operations_pb2.Operation() - client.update_issue_model( + client.deploy_issue_model( request, metadata=[ ("key", "val"), @@ -22855,9 +25710,9 @@ def test_update_issue_model_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_issue_model_rest_bad_request( +def test_deploy_issue_model_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.UpdateIssueModelRequest, + request_type=contact_center_insights.DeployIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22865,11 +25720,7 @@ def test_update_issue_model_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "issue_model": { - "name": "projects/sample1/locations/sample2/issueModels/sample3" - } - } + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22881,10 +25732,10 @@ def test_update_issue_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_issue_model(request) + client.deploy_issue_model(request) -def test_update_issue_model_rest_flattened(): +def test_deploy_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22893,45 +25744,40 @@ def test_update_issue_model_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.IssueModel() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "issue_model": { - "name": "projects/sample1/locations/sample2/issueModels/sample3" - } + "name": "projects/sample1/locations/sample2/issueModels/sample3" } # get truthy value for each flattened field mock_args = dict( - issue_model=resources.IssueModel(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_issue_model(**mock_args) + client.deploy_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{issue_model.name=projects/*/locations/*/issueModels/*}" + "%s/v1/{name=projects/*/locations/*/issueModels/*}:deploy" % client.transport._host, args[1], ) -def test_update_issue_model_rest_flattened_error(transport: str = "rest"): +def test_deploy_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22940,14 +25786,13 @@ def test_update_issue_model_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_issue_model( - contact_center_insights.UpdateIssueModelRequest(), - issue_model=resources.IssueModel(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.deploy_issue_model( + contact_center_insights.DeployIssueModelRequest(), + name="name_value", ) -def test_update_issue_model_rest_error(): +def test_deploy_issue_model_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22956,11 +25801,11 @@ def test_update_issue_model_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetIssueModelRequest, + contact_center_insights.UndeployIssueModelRequest, dict, ], ) -def test_get_issue_model_rest(request_type): +def test_undeploy_issue_model_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22973,37 +25818,22 @@ def test_get_issue_model_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.IssueModel( - name="name_value", - display_name="display_name_value", - issue_count=1201, - state=resources.IssueModel.State.UNDEPLOYED, - model_type=resources.IssueModel.ModelType.TYPE_V1, - language_code="language_code_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_issue_model(request) + response = client.undeploy_issue_model(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.IssueModel) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.issue_count == 1201 - assert response.state == resources.IssueModel.State.UNDEPLOYED - assert response.model_type == resources.IssueModel.ModelType.TYPE_V1 - assert response.language_code == "language_code_value" + assert response.operation.name == "operations/spam" -def test_get_issue_model_rest_use_cached_wrapped_rpc(): +def test_undeploy_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23017,30 +25847,38 @@ def test_get_issue_model_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_issue_model in client._transport._wrapped_methods + assert ( + client._transport.undeploy_issue_model in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" 
# operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_issue_model] = mock_rpc + client._transport._wrapped_methods[ + client._transport.undeploy_issue_model + ] = mock_rpc request = {} - client.get_issue_model(request) + client.undeploy_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_issue_model(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.undeploy_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_issue_model_rest_required_fields( - request_type=contact_center_insights.GetIssueModelRequest, +def test_undeploy_issue_model_rest_required_fields( + request_type=contact_center_insights.UndeployIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -23056,7 +25894,7 @@ def test_get_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_issue_model._get_unset_required_fields(jsonified_request) + ).undeploy_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -23065,7 +25903,7 @@ def test_get_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_issue_model._get_unset_required_fields(jsonified_request) + ).undeploy_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -23079,7 +25917,7 @@ def test_get_issue_model_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the 
returned response. - return_value = resources.IssueModel() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23091,39 +25929,37 @@ def test_get_issue_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_issue_model(request) + response = client.undeploy_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_issue_model_rest_unset_required_fields(): +def test_undeploy_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_issue_model._get_unset_required_fields({}) + unset_fields = transport.undeploy_issue_model._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_issue_model_rest_interceptors(null_interceptor): +def test_undeploy_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23136,14 +25972,16 @@ def 
test_get_issue_model_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_get_issue_model" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_undeploy_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_get_issue_model" + transports.ContactCenterInsightsRestInterceptor, "pre_undeploy_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.GetIssueModelRequest.pb( - contact_center_insights.GetIssueModelRequest() + pb_message = contact_center_insights.UndeployIssueModelRequest.pb( + contact_center_insights.UndeployIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -23155,17 +25993,19 @@ def test_get_issue_model_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.IssueModel.to_json(resources.IssueModel()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = contact_center_insights.GetIssueModelRequest() + request = contact_center_insights.UndeployIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.IssueModel() + post.return_value = operations_pb2.Operation() - client.get_issue_model( + client.undeploy_issue_model( request, metadata=[ ("key", "val"), @@ -23177,8 +26017,9 @@ def test_get_issue_model_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_issue_model_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.GetIssueModelRequest +def test_undeploy_issue_model_rest_bad_request( + transport: str = "rest", + 
request_type=contact_center_insights.UndeployIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23198,10 +26039,10 @@ def test_get_issue_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_issue_model(request) + client.undeploy_issue_model(request) -def test_get_issue_model_rest_flattened(): +def test_undeploy_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23210,7 +26051,7 @@ def test_get_issue_model_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.IssueModel() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { @@ -23226,26 +26067,24 @@ def test_get_issue_model_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_issue_model(**mock_args) + client.undeploy_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/issueModels/*}" + "%s/v1/{name=projects/*/locations/*/issueModels/*}:undeploy" % client.transport._host, args[1], ) -def test_get_issue_model_rest_flattened_error(transport: str = "rest"): +def test_undeploy_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23254,13 +26093,13 @@ def test_get_issue_model_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_issue_model( - contact_center_insights.GetIssueModelRequest(), + client.undeploy_issue_model( + contact_center_insights.UndeployIssueModelRequest(), name="name_value", ) -def test_get_issue_model_rest_error(): +def test_undeploy_issue_model_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -23269,41 +26108,39 @@ def test_get_issue_model_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListIssueModelsRequest, + contact_center_insights.ExportIssueModelRequest, dict, ], ) -def test_list_issue_models_rest(request_type): +def test_export_issue_model_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.ListIssueModelsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = contact_center_insights.ListIssueModelsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_issue_models(request) + response = client.export_issue_model(request) # Establish that the response is the type that we expect. - assert isinstance(response, contact_center_insights.ListIssueModelsResponse) + assert response.operation.name == "operations/spam" -def test_list_issue_models_rest_use_cached_wrapped_rpc(): +def test_export_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23317,7 +26154,9 @@ def test_list_issue_models_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_issue_models in client._transport._wrapped_methods + assert ( + client._transport.export_issue_model in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -23325,29 +26164,33 @@ def test_list_issue_models_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_issue_models + client._transport.export_issue_model ] = mock_rpc request = {} - client.list_issue_models(request) + client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_issue_models(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_issue_models_rest_required_fields( - request_type=contact_center_insights.ListIssueModelsRequest, +def test_export_issue_model_rest_required_fields( + request_type=contact_center_insights.ExportIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23358,21 +26201,21 @@ def test_list_issue_models_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_issue_models._get_unset_required_fields(jsonified_request) + ).export_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_issue_models._get_unset_required_fields(jsonified_request) + ).export_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23381,7 +26224,7 @@ def 
test_list_issue_models_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListIssueModelsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23393,41 +26236,37 @@ def test_list_issue_models_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = contact_center_insights.ListIssueModelsResponse.pb( - return_value - ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_issue_models(request) + response = client.export_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_issue_models_rest_unset_required_fields(): +def test_export_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_issue_models._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.export_issue_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_issue_models_rest_interceptors(null_interceptor): +def 
test_export_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23440,14 +26279,16 @@ def test_list_issue_models_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_list_issue_models" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_export_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_list_issue_models" + transports.ContactCenterInsightsRestInterceptor, "pre_export_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.ListIssueModelsRequest.pb( - contact_center_insights.ListIssueModelsRequest() + pb_message = contact_center_insights.ExportIssueModelRequest.pb( + contact_center_insights.ExportIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -23459,21 +26300,19 @@ def test_list_issue_models_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - contact_center_insights.ListIssueModelsResponse.to_json( - contact_center_insights.ListIssueModelsResponse() - ) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = contact_center_insights.ListIssueModelsRequest() + request = contact_center_insights.ExportIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.ListIssueModelsResponse() + post.return_value = operations_pb2.Operation() - client.list_issue_models( + client.export_issue_model( request, metadata=[ ("key", "val"), @@ -23485,8 
+26324,9 @@ def test_list_issue_models_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_issue_models_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.ListIssueModelsRequest +def test_export_issue_model_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.ExportIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23494,7 +26334,7 @@ def test_list_issue_models_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23506,10 +26346,10 @@ def test_list_issue_models_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_issue_models(request) + client.export_issue_model(request) -def test_list_issue_models_rest_flattened(): +def test_export_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23518,40 +26358,40 @@ def test_list_issue_models_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.ListIssueModelsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/issueModels/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = contact_center_insights.ListIssueModelsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_issue_models(**mock_args) + client.export_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/issueModels" + "%s/v1/{name=projects/*/locations/*/issueModels/*}:export" % client.transport._host, args[1], ) -def test_list_issue_models_rest_flattened_error(transport: str = "rest"): +def test_export_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23560,13 +26400,13 @@ def test_list_issue_models_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_issue_models( - contact_center_insights.ListIssueModelsRequest(), - parent="parent_value", + client.export_issue_model( + contact_center_insights.ExportIssueModelRequest(), + name="name_value", ) -def test_list_issue_models_rest_error(): +def test_export_issue_model_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -23575,18 +26415,18 @@ def test_list_issue_models_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeleteIssueModelRequest, + contact_center_insights.ImportIssueModelRequest, dict, ], ) -def test_delete_issue_model_rest(request_type): +def test_import_issue_model_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -23601,13 +26441,13 @@ def test_delete_issue_model_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_issue_model(request) + response = client.import_issue_model(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_delete_issue_model_rest_use_cached_wrapped_rpc(): +def test_import_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23622,7 +26462,7 @@ def test_delete_issue_model_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_issue_model in client._transport._wrapped_methods + client._transport.import_issue_model in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -23631,11 +26471,11 @@ def test_delete_issue_model_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_issue_model + client._transport.import_issue_model ] = mock_rpc request = {} - client.delete_issue_model(request) + client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -23644,20 +26484,20 @@ def test_delete_issue_model_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_issue_model(request) + client.import_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_issue_model_rest_required_fields( - request_type=contact_center_insights.DeleteIssueModelRequest, +def test_import_issue_model_rest_required_fields( + request_type=contact_center_insights.ImportIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23668,21 +26508,21 @@ def test_delete_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_issue_model._get_unset_required_fields(jsonified_request) + ).import_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_issue_model._get_unset_required_fields(jsonified_request) + ).import_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23703,9 +26543,10 @@ def 
test_delete_issue_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -23715,24 +26556,24 @@ def test_delete_issue_model_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_issue_model(request) + response = client.import_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_issue_model_rest_unset_required_fields(): +def test_import_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_issue_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.import_issue_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_issue_model_rest_interceptors(null_interceptor): +def test_import_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23747,14 +26588,14 @@ def test_delete_issue_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_delete_issue_model" + transports.ContactCenterInsightsRestInterceptor, "post_import_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_delete_issue_model" + 
transports.ContactCenterInsightsRestInterceptor, "pre_import_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.DeleteIssueModelRequest.pb( - contact_center_insights.DeleteIssueModelRequest() + pb_message = contact_center_insights.ImportIssueModelRequest.pb( + contact_center_insights.ImportIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -23770,7 +26611,7 @@ def test_delete_issue_model_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = contact_center_insights.DeleteIssueModelRequest() + request = contact_center_insights.ImportIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -23778,7 +26619,7 @@ def test_delete_issue_model_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_issue_model( + client.import_issue_model( request, metadata=[ ("key", "val"), @@ -23790,9 +26631,9 @@ def test_delete_issue_model_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_issue_model_rest_bad_request( +def test_import_issue_model_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.DeleteIssueModelRequest, + request_type=contact_center_insights.ImportIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23800,7 +26641,7 @@ def test_delete_issue_model_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -23812,10 +26653,10 @@ def test_delete_issue_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_issue_model(request) + client.import_issue_model(request) -def test_delete_issue_model_rest_flattened(): +def test_import_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23827,13 +26668,11 @@ def test_delete_issue_model_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/issueModels/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -23844,20 +26683,20 @@ def test_delete_issue_model_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_issue_model(**mock_args) + client.import_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/issueModels/*}" + "%s/v1/{parent=projects/*/locations/*}/issueModels:import" % client.transport._host, args[1], ) -def test_delete_issue_model_rest_flattened_error(transport: str = "rest"): +def test_import_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23866,13 +26705,13 @@ def test_delete_issue_model_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_issue_model( - contact_center_insights.DeleteIssueModelRequest(), - name="name_value", + client.import_issue_model( + contact_center_insights.ImportIssueModelRequest(), + parent="parent_value", ) -def test_delete_issue_model_rest_error(): +def test_import_issue_model_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -23881,39 +26720,52 @@ def test_delete_issue_model_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeployIssueModelRequest, + contact_center_insights.GetIssueRequest, dict, ], ) -def test_deploy_issue_model_rest(request_type): +def test_get_issue_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.Issue( + name="name_value", + display_name="display_name_value", + sample_utterances=["sample_utterances_value"], + display_description="display_description_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Issue.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.deploy_issue_model(request) + response = client.get_issue(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, resources.Issue) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.sample_utterances == ["sample_utterances_value"] + assert response.display_description == "display_description_value" -def test_deploy_issue_model_rest_use_cached_wrapped_rpc(): +def test_get_issue_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23927,38 +26779,30 @@ def test_deploy_issue_model_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.deploy_issue_model in client._transport._wrapped_methods - ) + assert client._transport.get_issue in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.deploy_issue_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_issue] = mock_rpc request = {} - client.deploy_issue_model(request) + client.get_issue(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.deploy_issue_model(request) + client.get_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_deploy_issue_model_rest_required_fields( - request_type=contact_center_insights.DeployIssueModelRequest, +def test_get_issue_rest_required_fields( + request_type=contact_center_insights.GetIssueRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -23974,7 +26818,7 @@ def test_deploy_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).deploy_issue_model._get_unset_required_fields(jsonified_request) + ).get_issue._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -23983,7 +26827,7 @@ def test_deploy_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).deploy_issue_model._get_unset_required_fields(jsonified_request) + ).get_issue._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -23997,7 +26841,7 @@ def test_deploy_issue_model_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.Issue() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24009,37 +26853,39 @@ def test_deploy_issue_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Issue.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.deploy_issue_model(request) + response = client.get_issue(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_deploy_issue_model_rest_unset_required_fields(): +def test_get_issue_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.deploy_issue_model._get_unset_required_fields({}) + unset_fields = transport.get_issue._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_deploy_issue_model_rest_interceptors(null_interceptor): +def test_get_issue_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24052,16 +26898,14 @@ def test_deploy_issue_model_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, 
"transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_deploy_issue_model" + transports.ContactCenterInsightsRestInterceptor, "post_get_issue" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_deploy_issue_model" + transports.ContactCenterInsightsRestInterceptor, "pre_get_issue" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.DeployIssueModelRequest.pb( - contact_center_insights.DeployIssueModelRequest() + pb_message = contact_center_insights.GetIssueRequest.pb( + contact_center_insights.GetIssueRequest() ) transcode.return_value = { "method": "post", @@ -24073,19 +26917,17 @@ def test_deploy_issue_model_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = resources.Issue.to_json(resources.Issue()) - request = contact_center_insights.DeployIssueModelRequest() + request = contact_center_insights.GetIssueRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = resources.Issue() - client.deploy_issue_model( + client.get_issue( request, metadata=[ ("key", "val"), @@ -24097,9 +26939,8 @@ def test_deploy_issue_model_rest_interceptors(null_interceptor): post.assert_called_once() -def test_deploy_issue_model_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.DeployIssueModelRequest, +def test_get_issue_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.GetIssueRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24107,7 +26948,9 
@@ def test_deploy_issue_model_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -24119,10 +26962,10 @@ def test_deploy_issue_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.deploy_issue_model(request) + client.get_issue(request) -def test_deploy_issue_model_rest_flattened(): +def test_get_issue_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24131,11 +26974,11 @@ def test_deploy_issue_model_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.Issue() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/issueModels/sample3" + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" } # get truthy value for each flattened field @@ -24147,24 +26990,26 @@ def test_deploy_issue_model_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Issue.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.deploy_issue_model(**mock_args) + client.get_issue(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/issueModels/*}:deploy" + "%s/v1/{name=projects/*/locations/*/issueModels/*/issues/*}" % client.transport._host, args[1], ) -def test_deploy_issue_model_rest_flattened_error(transport: str = "rest"): +def test_get_issue_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24173,13 +27018,13 @@ def test_deploy_issue_model_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.deploy_issue_model( - contact_center_insights.DeployIssueModelRequest(), + client.get_issue( + contact_center_insights.GetIssueRequest(), name="name_value", ) -def test_deploy_issue_model_rest_error(): +def test_get_issue_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24188,39 +27033,41 @@ def test_deploy_issue_model_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UndeployIssueModelRequest, + contact_center_insights.ListIssuesRequest, dict, ], ) -def test_undeploy_issue_model_rest(request_type): +def test_list_issues_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = contact_center_insights.ListIssuesResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = contact_center_insights.ListIssuesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.undeploy_issue_model(request) + response = client.list_issues(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, contact_center_insights.ListIssuesResponse) -def test_undeploy_issue_model_rest_use_cached_wrapped_rpc(): +def test_list_issues_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24234,43 +27081,35 @@ def test_undeploy_issue_model_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.undeploy_issue_model in client._transport._wrapped_methods - ) + assert client._transport.list_issues in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.undeploy_issue_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_issues] = mock_rpc request = {} - client.undeploy_issue_model(request) + client.list_issues(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.undeploy_issue_model(request) + client.list_issues(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_undeploy_issue_model_rest_required_fields( - request_type=contact_center_insights.UndeployIssueModelRequest, +def test_list_issues_rest_required_fields( + request_type=contact_center_insights.ListIssuesRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24281,21 +27120,21 @@ def test_undeploy_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).undeploy_issue_model._get_unset_required_fields(jsonified_request) + ).list_issues._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).undeploy_issue_model._get_unset_required_fields(jsonified_request) + ).list_issues._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24304,7 +27143,7 @@ def test_undeploy_issue_model_rest_required_fields( request 
= request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = contact_center_insights.ListIssuesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24316,37 +27155,39 @@ def test_undeploy_issue_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = contact_center_insights.ListIssuesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.undeploy_issue_model(request) + response = client.list_issues(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_undeploy_issue_model_rest_unset_required_fields(): +def test_list_issues_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.undeploy_issue_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_issues._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_undeploy_issue_model_rest_interceptors(null_interceptor): +def test_list_issues_rest_interceptors(null_interceptor): transport = 
transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24359,16 +27200,14 @@ def test_undeploy_issue_model_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_undeploy_issue_model" + transports.ContactCenterInsightsRestInterceptor, "post_list_issues" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_undeploy_issue_model" + transports.ContactCenterInsightsRestInterceptor, "pre_list_issues" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.UndeployIssueModelRequest.pb( - contact_center_insights.UndeployIssueModelRequest() + pb_message = contact_center_insights.ListIssuesRequest.pb( + contact_center_insights.ListIssuesRequest() ) transcode.return_value = { "method": "post", @@ -24380,19 +27219,19 @@ def test_undeploy_issue_model_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = contact_center_insights.ListIssuesResponse.to_json( + contact_center_insights.ListIssuesResponse() ) - request = contact_center_insights.UndeployIssueModelRequest() + request = contact_center_insights.ListIssuesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = contact_center_insights.ListIssuesResponse() - client.undeploy_issue_model( + client.list_issues( request, metadata=[ ("key", "val"), @@ -24404,9 +27243,8 @@ def test_undeploy_issue_model_rest_interceptors(null_interceptor): post.assert_called_once() -def 
test_undeploy_issue_model_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.UndeployIssueModelRequest, +def test_list_issues_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.ListIssuesRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24414,7 +27252,7 @@ def test_undeploy_issue_model_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -24426,10 +27264,10 @@ def test_undeploy_issue_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.undeploy_issue_model(request) + client.list_issues(request) -def test_undeploy_issue_model_rest_flattened(): +def test_list_issues_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24438,40 +27276,42 @@ def test_undeploy_issue_model_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = contact_center_insights.ListIssuesResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/issueModels/sample3" + "parent": "projects/sample1/locations/sample2/issueModels/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = contact_center_insights.ListIssuesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.undeploy_issue_model(**mock_args) + client.list_issues(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/issueModels/*}:undeploy" + "%s/v1/{parent=projects/*/locations/*/issueModels/*}/issues" % client.transport._host, args[1], ) -def test_undeploy_issue_model_rest_flattened_error(transport: str = "rest"): +def test_list_issues_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24480,13 +27320,13 @@ def test_undeploy_issue_model_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.undeploy_issue_model( - contact_center_insights.UndeployIssueModelRequest(), - name="name_value", + client.list_issues( + contact_center_insights.ListIssuesRequest(), + parent="parent_value", ) -def test_undeploy_issue_model_rest_error(): +def test_list_issues_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24495,11 +27335,11 @@ def test_undeploy_issue_model_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetIssueRequest, + contact_center_insights.UpdateIssueRequest, dict, ], ) -def test_get_issue_rest(request_type): +def test_update_issue_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24507,8 +27347,85 @@ def test_get_issue_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + "issue": { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } } + request_init["issue"] = { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "sample_utterances": ["sample_utterances_value1", "sample_utterances_value2"], + "display_description": "display_description_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.UpdateIssueRequest.meta.fields["issue"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["issue"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + 
# Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["issue"][field])): + del request_init["issue"][field][i][subfield] + else: + del request_init["issue"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -24518,6 +27435,7 @@ def test_get_issue_rest(request_type): name="name_value", display_name="display_name_value", sample_utterances=["sample_utterances_value"], + display_description="display_description_value", ) # Wrap the value into a proper Response obj @@ -24529,16 +27447,17 @@ def test_get_issue_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_issue(request) + response = client.update_issue(request) # Establish that the response is the type that we expect. 
assert isinstance(response, resources.Issue) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.sample_utterances == ["sample_utterances_value"] + assert response.display_description == "display_description_value" -def test_get_issue_rest_use_cached_wrapped_rpc(): +def test_update_issue_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24552,35 +27471,34 @@ def test_get_issue_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_issue in client._transport._wrapped_methods + assert client._transport.update_issue in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_issue] = mock_rpc + client._transport._wrapped_methods[client._transport.update_issue] = mock_rpc request = {} - client.get_issue(request) + client.update_issue(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_issue(request) + client.update_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_issue_rest_required_fields( - request_type=contact_center_insights.GetIssueRequest, +def test_update_issue_rest_required_fields( + request_type=contact_center_insights.UpdateIssueRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24591,21 +27509,19 @@ def test_get_issue_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_issue._get_unset_required_fields(jsonified_request) + ).update_issue._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" + # verify required fields with default values are now present unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_issue._get_unset_required_fields(jsonified_request) + ).update_issue._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24626,9 +27542,10 @@ def test_get_issue_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -24641,24 +27558,24 @@ def test_get_issue_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_issue(request) + response = client.update_issue(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_issue_rest_unset_required_fields(): +def test_update_issue_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_issue._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_issue._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("issue",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_issue_rest_interceptors(null_interceptor): +def test_update_issue_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24671,14 +27588,14 @@ def test_get_issue_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, 
mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_get_issue" + transports.ContactCenterInsightsRestInterceptor, "post_update_issue" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_get_issue" + transports.ContactCenterInsightsRestInterceptor, "pre_update_issue" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.GetIssueRequest.pb( - contact_center_insights.GetIssueRequest() + pb_message = contact_center_insights.UpdateIssueRequest.pb( + contact_center_insights.UpdateIssueRequest() ) transcode.return_value = { "method": "post", @@ -24692,7 +27609,7 @@ def test_get_issue_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = resources.Issue.to_json(resources.Issue()) - request = contact_center_insights.GetIssueRequest() + request = contact_center_insights.UpdateIssueRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -24700,7 +27617,7 @@ def test_get_issue_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = resources.Issue() - client.get_issue( + client.update_issue( request, metadata=[ ("key", "val"), @@ -24712,8 +27629,8 @@ def test_get_issue_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_issue_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.GetIssueRequest +def test_update_issue_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.UpdateIssueRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24722,7 +27639,9 @@ def test_get_issue_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + "issue": { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } } request = 
request_type(**request_init) @@ -24735,10 +27654,10 @@ def test_get_issue_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_issue(request) + client.update_issue(request) -def test_get_issue_rest_flattened(): +def test_update_issue_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24751,12 +27670,15 @@ def test_get_issue_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + "issue": { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + issue=resources.Issue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -24769,20 +27691,20 @@ def test_get_issue_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_issue(**mock_args) + client.update_issue(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/issueModels/*/issues/*}" + "%s/v1/{issue.name=projects/*/locations/*/issueModels/*/issues/*}" % client.transport._host, args[1], ) -def test_get_issue_rest_flattened_error(transport: str = "rest"): +def test_update_issue_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24791,13 +27713,14 @@ def test_get_issue_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_issue( - contact_center_insights.GetIssueRequest(), - name="name_value", + client.update_issue( + contact_center_insights.UpdateIssueRequest(), + issue=resources.Issue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_issue_rest_error(): +def test_update_issue_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24806,41 +27729,41 @@ def test_get_issue_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListIssuesRequest, + contact_center_insights.DeleteIssueRequest, dict, ], ) -def test_list_issues_rest(request_type): +def test_delete_issue_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListIssuesResponse() + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = contact_center_insights.ListIssuesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_issues(request) + response = client.delete_issue(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, contact_center_insights.ListIssuesResponse) + assert response is None -def test_list_issues_rest_use_cached_wrapped_rpc(): +def test_delete_issue_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24854,35 +27777,35 @@ def test_list_issues_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_issues in client._transport._wrapped_methods + assert client._transport.delete_issue in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_issues] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_issue] = mock_rpc request = {} - client.list_issues(request) + client.delete_issue(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_issues(request) + client.delete_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_issues_rest_required_fields( - request_type=contact_center_insights.ListIssuesRequest, +def test_delete_issue_rest_required_fields( + request_type=contact_center_insights.DeleteIssueRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24893,21 +27816,21 @@ def test_list_issues_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_issues._get_unset_required_fields(jsonified_request) + ).delete_issue._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_issues._get_unset_required_fields(jsonified_request) + ).delete_issue._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24916,7 +27839,7 @@ def test_list_issues_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.ListIssuesResponse() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24928,39 +27851,36 @@ def test_list_issues_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = contact_center_insights.ListIssuesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_issues(request) + response = client.delete_issue(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_issues_rest_unset_required_fields(): +def test_delete_issue_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_issues._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.delete_issue._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_issues_rest_interceptors(null_interceptor): +def test_delete_issue_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24973,14 +27893,11 @@ def test_list_issues_rest_interceptors(null_interceptor): ) as req, 
mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_list_issues" - ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_list_issues" + transports.ContactCenterInsightsRestInterceptor, "pre_delete_issue" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = contact_center_insights.ListIssuesRequest.pb( - contact_center_insights.ListIssuesRequest() + pb_message = contact_center_insights.DeleteIssueRequest.pb( + contact_center_insights.DeleteIssueRequest() ) transcode.return_value = { "method": "post", @@ -24992,19 +27909,15 @@ def test_list_issues_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = contact_center_insights.ListIssuesResponse.to_json( - contact_center_insights.ListIssuesResponse() - ) - request = contact_center_insights.ListIssuesRequest() + request = contact_center_insights.DeleteIssueRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.ListIssuesResponse() - client.list_issues( + client.delete_issue( request, metadata=[ ("key", "val"), @@ -25013,11 +27926,10 @@ def test_list_issues_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_list_issues_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.ListIssuesRequest +def test_delete_issue_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.DeleteIssueRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25025,7 +27937,9 @@ def test_list_issues_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": 
"projects/sample1/locations/sample2/issueModels/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -25037,10 +27951,10 @@ def test_list_issues_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_issues(request) + client.delete_issue(request) -def test_list_issues_rest_flattened(): +def test_delete_issue_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25049,42 +27963,40 @@ def test_list_issues_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListIssuesResponse() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/issueModels/sample3" + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = contact_center_insights.ListIssuesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_issues(**mock_args) + client.delete_issue(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/issueModels/*}/issues" + "%s/v1/{name=projects/*/locations/*/issueModels/*/issues/*}" % client.transport._host, args[1], ) -def test_list_issues_rest_flattened_error(transport: str = "rest"): +def test_delete_issue_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25093,141 +28005,62 @@ def test_list_issues_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_issues( - contact_center_insights.ListIssuesRequest(), - parent="parent_value", - ) - - -def test_list_issues_rest_error(): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.UpdateIssueRequest, - dict, - ], -) -def test_update_issue_rest(request_type): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "issue": { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" - } - } - request_init["issue"] = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4", - "display_name": "display_name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "sample_utterances": ["sample_utterances_value1", "sample_utterances_value2"], - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.UpdateIssueRequest.meta.fields["issue"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["issue"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + client.delete_issue( + contact_center_insights.DeleteIssueRequest(), + name="name_value", + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove 
fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["issue"][field])): - del request_init["issue"][field][i][subfield] - else: - del request_init["issue"][field][subfield] +def test_delete_issue_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.CalculateIssueModelStatsRequest, + dict, + ], +) +def test_calculate_issue_model_stats_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "issue_model": "projects/sample1/locations/sample2/issueModels/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Issue( - name="name_value", - display_name="display_name_value", - sample_utterances=["sample_utterances_value"], - ) + return_value = contact_center_insights.CalculateIssueModelStatsResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Issue.pb(return_value) + return_value = contact_center_insights.CalculateIssueModelStatsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_issue(request) + response = client.calculate_issue_model_stats(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Issue) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.sample_utterances == ["sample_utterances_value"] + assert isinstance( + response, contact_center_insights.CalculateIssueModelStatsResponse + ) -def test_update_issue_rest_use_cached_wrapped_rpc(): +def test_calculate_issue_model_stats_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25241,34 +28074,40 @@ def test_update_issue_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_issue in client._transport._wrapped_methods + assert ( + client._transport.calculate_issue_model_stats + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_issue] = mock_rpc + client._transport._wrapped_methods[ + client._transport.calculate_issue_model_stats + ] = mock_rpc request = {} - client.update_issue(request) + client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_issue(request) + client.calculate_issue_model_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_issue_rest_required_fields( - request_type=contact_center_insights.UpdateIssueRequest, +def test_calculate_issue_model_stats_rest_required_fields( + request_type=contact_center_insights.CalculateIssueModelStatsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} + request_init["issue_model"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -25279,19 +28118,21 @@ def test_update_issue_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_issue._get_unset_required_fields(jsonified_request) + ).calculate_issue_model_stats._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["issueModel"] = "issue_model_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_issue._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).calculate_issue_model_stats._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "issueModel" in jsonified_request + assert jsonified_request["issueModel"] == "issue_model_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25300,7 +28141,7 @@ def test_update_issue_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Issue() + return_value = contact_center_insights.CalculateIssueModelStatsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -25312,40 +28153,41 @@ def test_update_issue_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Issue.pb(return_value) + return_value = contact_center_insights.CalculateIssueModelStatsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_issue(request) + response = client.calculate_issue_model_stats(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_issue_rest_unset_required_fields(): +def test_calculate_issue_model_stats_rest_unset_required_fields(): transport = 
transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_issue._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("issue",))) + unset_fields = transport.calculate_issue_model_stats._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("issueModel",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_issue_rest_interceptors(null_interceptor): +def test_calculate_issue_model_stats_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25358,14 +28200,16 @@ def test_update_issue_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_update_issue" + transports.ContactCenterInsightsRestInterceptor, + "post_calculate_issue_model_stats", ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_update_issue" + transports.ContactCenterInsightsRestInterceptor, + "pre_calculate_issue_model_stats", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.UpdateIssueRequest.pb( - contact_center_insights.UpdateIssueRequest() + pb_message = contact_center_insights.CalculateIssueModelStatsRequest.pb( + contact_center_insights.CalculateIssueModelStatsRequest() ) transcode.return_value = { "method": "post", @@ -25377,17 +28221,21 @@ def test_update_issue_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Issue.to_json(resources.Issue()) + req.return_value._content = ( + contact_center_insights.CalculateIssueModelStatsResponse.to_json( + 
contact_center_insights.CalculateIssueModelStatsResponse() + ) + ) - request = contact_center_insights.UpdateIssueRequest() + request = contact_center_insights.CalculateIssueModelStatsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Issue() + post.return_value = contact_center_insights.CalculateIssueModelStatsResponse() - client.update_issue( + client.calculate_issue_model_stats( request, metadata=[ ("key", "val"), @@ -25399,8 +28247,9 @@ def test_update_issue_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_issue_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.UpdateIssueRequest +def test_calculate_issue_model_stats_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.CalculateIssueModelStatsRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25409,9 +28258,7 @@ def test_update_issue_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "issue": { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" - } + "issue_model": "projects/sample1/locations/sample2/issueModels/sample3" } request = request_type(**request_init) @@ -25424,10 +28271,10 @@ def test_update_issue_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_issue(request) + client.calculate_issue_model_stats(request) -def test_update_issue_rest_flattened(): +def test_calculate_issue_model_stats_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25436,19 +28283,16 @@ def test_update_issue_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Issue() + return_value = contact_center_insights.CalculateIssueModelStatsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "issue": { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" - } + "issue_model": "projects/sample1/locations/sample2/issueModels/sample3" } # get truthy value for each flattened field mock_args = dict( - issue=resources.Issue(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + issue_model="issue_model_value", ) mock_args.update(sample_request) @@ -25456,25 +28300,27 @@ def test_update_issue_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Issue.pb(return_value) + return_value = contact_center_insights.CalculateIssueModelStatsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_issue(**mock_args) + client.calculate_issue_model_stats(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{issue.name=projects/*/locations/*/issueModels/*/issues/*}" + "%s/v1/{issue_model=projects/*/locations/*/issueModels/*}:calculateIssueModelStats" % client.transport._host, args[1], ) -def test_update_issue_rest_flattened_error(transport: str = "rest"): +def test_calculate_issue_model_stats_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25483,14 +28329,13 @@ def test_update_issue_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_issue( - contact_center_insights.UpdateIssueRequest(), - issue=resources.Issue(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.calculate_issue_model_stats( + contact_center_insights.CalculateIssueModelStatsRequest(), + issue_model="issue_model_value", ) -def test_update_issue_rest_error(): +def test_calculate_issue_model_stats_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -25499,41 +28344,149 @@ def test_update_issue_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeleteIssueRequest, + contact_center_insights.CreatePhraseMatcherRequest, dict, ], ) -def test_delete_issue_rest(request_type): +def test_create_phrase_matcher_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" - } + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + 
request_init["phrase_matcher"] = { + "name": "name_value", + "revision_id": "revision_id_value", + "version_tag": "version_tag_value", + "revision_create_time": {"seconds": 751, "nanos": 543}, + "display_name": "display_name_value", + "type_": 1, + "active": True, + "phrase_match_rule_groups": [ + { + "type_": 1, + "phrase_match_rules": [ + { + "query": "query_value", + "negated": True, + "config": {"exact_match_config": {"case_sensitive": True}}, + } + ], + } + ], + "activation_update_time": {}, + "role_match": 1, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.CreatePhraseMatcherRequest.meta.fields[ + "phrase_matcher" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["phrase_matcher"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["phrase_matcher"][field])): + del request_init["phrase_matcher"][field][i][subfield] + else: + 
del request_init["phrase_matcher"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.PhraseMatcher.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_issue(request) + response = client.create_phrase_matcher(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT -def test_delete_issue_rest_use_cached_wrapped_rpc(): +def test_create_phrase_matcher_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25547,35 +28500,40 @@ def test_delete_issue_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_issue in client._transport._wrapped_methods + assert ( + client._transport.create_phrase_matcher + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_issue] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_phrase_matcher + ] = mock_rpc request = {} - client.delete_issue(request) + client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_issue(request) + client.create_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_issue_rest_required_fields( - request_type=contact_center_insights.DeleteIssueRequest, +def test_create_phrase_matcher_rest_required_fields( + request_type=contact_center_insights.CreatePhraseMatcherRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -25586,21 +28544,21 @@ def test_delete_issue_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_issue._get_unset_required_fields(jsonified_request) + ).create_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_issue._get_unset_required_fields(jsonified_request) + ).create_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25609,7 +28567,7 @@ def test_delete_issue_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = None + return_value = resources.PhraseMatcher() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -25621,36 +28579,48 @@ def test_delete_issue_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = resources.PhraseMatcher.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_issue(request) + response = client.create_phrase_matcher(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_issue_rest_unset_required_fields(): +def test_create_phrase_matcher_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_issue._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_phrase_matcher._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "phraseMatcher", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_issue_rest_interceptors(null_interceptor): +def test_create_phrase_matcher_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25663,11 
+28633,14 @@ def test_delete_issue_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_delete_issue" + transports.ContactCenterInsightsRestInterceptor, "post_create_phrase_matcher" + ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "pre_create_phrase_matcher" ) as pre: pre.assert_not_called() - pb_message = contact_center_insights.DeleteIssueRequest.pb( - contact_center_insights.DeleteIssueRequest() + post.assert_not_called() + pb_message = contact_center_insights.CreatePhraseMatcherRequest.pb( + contact_center_insights.CreatePhraseMatcherRequest() ) transcode.return_value = { "method": "post", @@ -25679,15 +28652,19 @@ def test_delete_issue_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = resources.PhraseMatcher.to_json( + resources.PhraseMatcher() + ) - request = contact_center_insights.DeleteIssueRequest() + request = contact_center_insights.CreatePhraseMatcherRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = resources.PhraseMatcher() - client.delete_issue( + client.create_phrase_matcher( request, metadata=[ ("key", "val"), @@ -25696,10 +28673,12 @@ def test_delete_issue_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_issue_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.DeleteIssueRequest +def test_create_phrase_matcher_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.CreatePhraseMatcherRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25707,9 +28686,7 @@ def test_delete_issue_rest_bad_request( ) # send a request that 
will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -25721,10 +28698,10 @@ def test_delete_issue_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_issue(request) + client.create_phrase_matcher(request) -def test_delete_issue_rest_flattened(): +def test_create_phrase_matcher_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25733,40 +28710,41 @@ def test_delete_issue_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = resources.PhraseMatcher() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.PhraseMatcher.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_issue(**mock_args) + client.create_phrase_matcher(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/issueModels/*/issues/*}" + "%s/v1/{parent=projects/*/locations/*}/phraseMatchers" % client.transport._host, args[1], ) -def test_delete_issue_rest_flattened_error(transport: str = "rest"): +def test_create_phrase_matcher_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25775,13 +28753,14 @@ def test_delete_issue_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_issue( - contact_center_insights.DeleteIssueRequest(), - name="name_value", + client.create_phrase_matcher( + contact_center_insights.CreatePhraseMatcherRequest(), + parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), ) -def test_delete_issue_rest_error(): +def test_create_phrase_matcher_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -25790,47 +28769,56 @@ def test_delete_issue_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CalculateIssueModelStatsRequest, + contact_center_insights.GetPhraseMatcherRequest, dict, ], ) -def test_calculate_issue_model_stats_rest(request_type): +def test_get_phrase_matcher_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "issue_model": "projects/sample1/locations/sample2/issueModels/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/phraseMatchers/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.CalculateIssueModelStatsResponse() + return_value = resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.CalculateIssueModelStatsResponse.pb( - return_value - ) + return_value = resources.PhraseMatcher.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.calculate_issue_model_stats(request) + response = client.get_phrase_matcher(request) # Establish that the response is the type that we expect. 
- assert isinstance( - response, contact_center_insights.CalculateIssueModelStatsResponse - ) + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT -def test_calculate_issue_model_stats_rest_use_cached_wrapped_rpc(): +def test_get_phrase_matcher_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25845,8 +28833,7 @@ def test_calculate_issue_model_stats_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.calculate_issue_model_stats - in client._transport._wrapped_methods + client._transport.get_phrase_matcher in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -25855,29 +28842,29 @@ def test_calculate_issue_model_stats_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.calculate_issue_model_stats + client._transport.get_phrase_matcher ] = mock_rpc request = {} - client.calculate_issue_model_stats(request) + client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.calculate_issue_model_stats(request) + client.get_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_calculate_issue_model_stats_rest_required_fields( - request_type=contact_center_insights.CalculateIssueModelStatsRequest, +def test_get_phrase_matcher_rest_required_fields( + request_type=contact_center_insights.GetPhraseMatcherRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["issue_model"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -25888,21 +28875,21 @@ def test_calculate_issue_model_stats_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).calculate_issue_model_stats._get_unset_required_fields(jsonified_request) + ).get_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["issueModel"] = "issue_model_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).calculate_issue_model_stats._get_unset_required_fields(jsonified_request) + ).get_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "issueModel" in jsonified_request - assert jsonified_request["issueModel"] == "issue_model_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25911,7 +28898,7 @@ def test_calculate_issue_model_stats_rest_required_fields( request = request_type(**request_init) # 
Designate an appropriate value for the returned response. - return_value = contact_center_insights.CalculateIssueModelStatsResponse() + return_value = resources.PhraseMatcher() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -25932,32 +28919,30 @@ def test_calculate_issue_model_stats_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.CalculateIssueModelStatsResponse.pb( - return_value - ) + return_value = resources.PhraseMatcher.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.calculate_issue_model_stats(request) + response = client.get_phrase_matcher(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_calculate_issue_model_stats_rest_unset_required_fields(): +def test_get_phrase_matcher_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.calculate_issue_model_stats._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("issueModel",))) + unset_fields = transport.get_phrase_matcher._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_calculate_issue_model_stats_rest_interceptors(null_interceptor): +def test_get_phrase_matcher_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25970,16 +28955,14 @@ def 
test_calculate_issue_model_stats_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, - "post_calculate_issue_model_stats", + transports.ContactCenterInsightsRestInterceptor, "post_get_phrase_matcher" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, - "pre_calculate_issue_model_stats", + transports.ContactCenterInsightsRestInterceptor, "pre_get_phrase_matcher" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.CalculateIssueModelStatsRequest.pb( - contact_center_insights.CalculateIssueModelStatsRequest() + pb_message = contact_center_insights.GetPhraseMatcherRequest.pb( + contact_center_insights.GetPhraseMatcherRequest() ) transcode.return_value = { "method": "post", @@ -25991,21 +28974,19 @@ def test_calculate_issue_model_stats_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - contact_center_insights.CalculateIssueModelStatsResponse.to_json( - contact_center_insights.CalculateIssueModelStatsResponse() - ) + req.return_value._content = resources.PhraseMatcher.to_json( + resources.PhraseMatcher() ) - request = contact_center_insights.CalculateIssueModelStatsRequest() + request = contact_center_insights.GetPhraseMatcherRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.CalculateIssueModelStatsResponse() + post.return_value = resources.PhraseMatcher() - client.calculate_issue_model_stats( + client.get_phrase_matcher( request, metadata=[ ("key", "val"), @@ -26017,9 +28998,9 @@ def test_calculate_issue_model_stats_rest_interceptors(null_interceptor): post.assert_called_once() -def test_calculate_issue_model_stats_rest_bad_request( +def 
test_get_phrase_matcher_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.CalculateIssueModelStatsRequest, + request_type=contact_center_insights.GetPhraseMatcherRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -26027,9 +29008,7 @@ def test_calculate_issue_model_stats_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "issue_model": "projects/sample1/locations/sample2/issueModels/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/phraseMatchers/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -26041,10 +29020,10 @@ def test_calculate_issue_model_stats_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.calculate_issue_model_stats(request) + client.get_phrase_matcher(request) -def test_calculate_issue_model_stats_rest_flattened(): +def test_get_phrase_matcher_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26053,16 +29032,16 @@ def test_calculate_issue_model_stats_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.CalculateIssueModelStatsResponse() + return_value = resources.PhraseMatcher() # get arguments that satisfy an http rule for this method sample_request = { - "issue_model": "projects/sample1/locations/sample2/issueModels/sample3" + "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" } # get truthy value for each flattened field mock_args = dict( - issue_model="issue_model_value", + name="name_value", ) mock_args.update(sample_request) @@ -26070,27 +29049,25 @@ def test_calculate_issue_model_stats_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.CalculateIssueModelStatsResponse.pb( - return_value - ) + return_value = resources.PhraseMatcher.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.calculate_issue_model_stats(**mock_args) + client.get_phrase_matcher(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{issue_model=projects/*/locations/*/issueModels/*}:calculateIssueModelStats" + "%s/v1/{name=projects/*/locations/*/phraseMatchers/*}" % client.transport._host, args[1], ) -def test_calculate_issue_model_stats_rest_flattened_error(transport: str = "rest"): +def test_get_phrase_matcher_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26099,13 +29076,13 @@ def test_calculate_issue_model_stats_rest_flattened_error(transport: str = "rest # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.calculate_issue_model_stats( - contact_center_insights.CalculateIssueModelStatsRequest(), - issue_model="issue_model_value", + client.get_phrase_matcher( + contact_center_insights.GetPhraseMatcherRequest(), + name="name_value", ) -def test_calculate_issue_model_stats_rest_error(): +def test_get_phrase_matcher_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -26114,11 +29091,11 @@ def test_calculate_issue_model_stats_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CreatePhraseMatcherRequest, + contact_center_insights.ListPhraseMatchersRequest, dict, ], ) -def test_create_phrase_matcher_rest(request_type): +def test_list_phrase_matchers_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26126,137 +29103,34 @@ def test_create_phrase_matcher_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["phrase_matcher"] = { - "name": "name_value", - "revision_id": "revision_id_value", - "version_tag": "version_tag_value", - "revision_create_time": {"seconds": 751, "nanos": 543}, - "display_name": "display_name_value", - "type_": 1, - "active": True, - "phrase_match_rule_groups": [ - { - "type_": 1, - "phrase_match_rules": [ - { - "query": "query_value", - "negated": True, - "config": {"exact_match_config": {"case_sensitive": True}}, - } - ], - } - ], - "activation_update_time": {}, - "role_match": 1, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.CreatePhraseMatcherRequest.meta.fields[ - "phrase_matcher" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["phrase_matcher"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the 
runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["phrase_matcher"][field])): - del request_init["phrase_matcher"][field][i][subfield] - else: - del request_init["phrase_matcher"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + return_value = contact_center_insights.ListPhraseMatchersResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) + return_value = contact_center_insights.ListPhraseMatchersResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_phrase_matcher(request) + response = client.list_phrase_matchers(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert isinstance(response, pagers.ListPhraseMatchersPager) + assert response.next_page_token == "next_page_token_value" -def test_create_phrase_matcher_rest_use_cached_wrapped_rpc(): +def test_list_phrase_matchers_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -26271,8 +29145,7 @@ def test_create_phrase_matcher_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_phrase_matcher - in client._transport._wrapped_methods + client._transport.list_phrase_matchers in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -26281,24 +29154,24 @@ def test_create_phrase_matcher_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_phrase_matcher + client._transport.list_phrase_matchers ] = mock_rpc request = {} - client.create_phrase_matcher(request) + client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_phrase_matcher(request) + client.list_phrase_matchers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_phrase_matcher_rest_required_fields( - request_type=contact_center_insights.CreatePhraseMatcherRequest, +def test_list_phrase_matchers_rest_required_fields( + request_type=contact_center_insights.ListPhraseMatchersRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -26314,7 +29187,7 @@ def test_create_phrase_matcher_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_phrase_matcher._get_unset_required_fields(jsonified_request) + ).list_phrase_matchers._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -26323,7 +29196,15 @@ def test_create_phrase_matcher_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_phrase_matcher._get_unset_required_fields(jsonified_request) + ).list_phrase_matchers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -26337,7 +29218,7 @@ def test_create_phrase_matcher_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher() + return_value = contact_center_insights.ListPhraseMatchersResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -26349,48 +29230,50 @@ def test_create_phrase_matcher_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) + return_value = contact_center_insights.ListPhraseMatchersResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_phrase_matcher(request) + response = client.list_phrase_matchers(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_phrase_matcher_rest_unset_required_fields(): +def test_list_phrase_matchers_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_phrase_matcher._get_unset_required_fields({}) + unset_fields = transport.list_phrase_matchers._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) - & set( + set( ( - "parent", - "phraseMatcher", + "filter", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_phrase_matcher_rest_interceptors(null_interceptor): +def test_list_phrase_matchers_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26403,14 +29286,14 @@ def 
test_create_phrase_matcher_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_create_phrase_matcher" + transports.ContactCenterInsightsRestInterceptor, "post_list_phrase_matchers" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_create_phrase_matcher" + transports.ContactCenterInsightsRestInterceptor, "pre_list_phrase_matchers" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.CreatePhraseMatcherRequest.pb( - contact_center_insights.CreatePhraseMatcherRequest() + pb_message = contact_center_insights.ListPhraseMatchersRequest.pb( + contact_center_insights.ListPhraseMatchersRequest() ) transcode.return_value = { "method": "post", @@ -26422,19 +29305,21 @@ def test_create_phrase_matcher_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.PhraseMatcher.to_json( - resources.PhraseMatcher() + req.return_value._content = ( + contact_center_insights.ListPhraseMatchersResponse.to_json( + contact_center_insights.ListPhraseMatchersResponse() + ) ) - request = contact_center_insights.CreatePhraseMatcherRequest() + request = contact_center_insights.ListPhraseMatchersRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.PhraseMatcher() + post.return_value = contact_center_insights.ListPhraseMatchersResponse() - client.create_phrase_matcher( + client.list_phrase_matchers( request, metadata=[ ("key", "val"), @@ -26446,9 +29331,9 @@ def test_create_phrase_matcher_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_phrase_matcher_rest_bad_request( +def test_list_phrase_matchers_rest_bad_request( transport: str = "rest", - 
request_type=contact_center_insights.CreatePhraseMatcherRequest, + request_type=contact_center_insights.ListPhraseMatchersRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -26468,10 +29353,10 @@ def test_create_phrase_matcher_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_phrase_matcher(request) + client.list_phrase_matchers(request) -def test_create_phrase_matcher_rest_flattened(): +def test_list_phrase_matchers_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26480,7 +29365,7 @@ def test_create_phrase_matcher_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher() + return_value = contact_center_insights.ListPhraseMatchersResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -26488,7 +29373,6 @@ def test_create_phrase_matcher_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - phrase_matcher=resources.PhraseMatcher(name="name_value"), ) mock_args.update(sample_request) @@ -26496,12 +29380,14 @@ def test_create_phrase_matcher_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) + return_value = contact_center_insights.ListPhraseMatchersResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_phrase_matcher(**mock_args) + 
client.list_phrase_matchers(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -26514,7 +29400,7 @@ def test_create_phrase_matcher_rest_flattened(): ) -def test_create_phrase_matcher_rest_flattened_error(transport: str = "rest"): +def test_list_phrase_matchers_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26523,27 +29409,84 @@ def test_create_phrase_matcher_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_phrase_matcher( - contact_center_insights.CreatePhraseMatcherRequest(), + client.list_phrase_matchers( + contact_center_insights.ListPhraseMatchersRequest(), parent="parent_value", - phrase_matcher=resources.PhraseMatcher(name="name_value"), ) -def test_create_phrase_matcher_rest_error(): +def test_list_phrase_matchers_rest_pager(transport: str = "rest"): client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + next_page_token="abc", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[], + next_page_token="def", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + contact_center_insights.ListPhraseMatchersResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_phrase_matchers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.PhraseMatcher) for i in results) + + pages = list(client.list_phrase_matchers(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetPhraseMatcherRequest, + contact_center_insights.DeletePhraseMatcherRequest, dict, ], ) -def test_get_phrase_matcher_rest(request_type): +def test_delete_phrase_matcher_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26556,39 
+29499,22 @@ def test_get_phrase_matcher_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_phrase_matcher(request) + response = client.delete_phrase_matcher(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert response is None -def test_get_phrase_matcher_rest_use_cached_wrapped_rpc(): +def test_delete_phrase_matcher_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -26603,7 +29529,8 @@ def test_get_phrase_matcher_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_phrase_matcher in client._transport._wrapped_methods + client._transport.delete_phrase_matcher + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -26612,24 +29539,24 @@ def test_get_phrase_matcher_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_phrase_matcher + client._transport.delete_phrase_matcher ] = mock_rpc request = {} - client.get_phrase_matcher(request) + client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_phrase_matcher(request) + client.delete_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_phrase_matcher_rest_required_fields( - request_type=contact_center_insights.GetPhraseMatcherRequest, +def test_delete_phrase_matcher_rest_required_fields( + request_type=contact_center_insights.DeletePhraseMatcherRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -26645,7 +29572,7 @@ def test_get_phrase_matcher_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_phrase_matcher._get_unset_required_fields(jsonified_request) + ).delete_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -26654,7 +29581,7 @@ def test_get_phrase_matcher_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_phrase_matcher._get_unset_required_fields(jsonified_request) + ).delete_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -26668,7 +29595,7 @@ def test_get_phrase_matcher_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher() + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -26680,39 +29607,36 @@ def test_get_phrase_matcher_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_phrase_matcher(request) + response = client.delete_phrase_matcher(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_phrase_matcher_rest_unset_required_fields(): +def test_delete_phrase_matcher_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_phrase_matcher._get_unset_required_fields({}) + unset_fields = transport.delete_phrase_matcher._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_phrase_matcher_rest_interceptors(null_interceptor): +def test_delete_phrase_matcher_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26725,14 +29649,11 @@ def test_get_phrase_matcher_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_get_phrase_matcher" - 
) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_get_phrase_matcher" + transports.ContactCenterInsightsRestInterceptor, "pre_delete_phrase_matcher" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = contact_center_insights.GetPhraseMatcherRequest.pb( - contact_center_insights.GetPhraseMatcherRequest() + pb_message = contact_center_insights.DeletePhraseMatcherRequest.pb( + contact_center_insights.DeletePhraseMatcherRequest() ) transcode.return_value = { "method": "post", @@ -26744,19 +29665,15 @@ def test_get_phrase_matcher_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.PhraseMatcher.to_json( - resources.PhraseMatcher() - ) - request = contact_center_insights.GetPhraseMatcherRequest() + request = contact_center_insights.DeletePhraseMatcherRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.PhraseMatcher() - client.get_phrase_matcher( + client.delete_phrase_matcher( request, metadata=[ ("key", "val"), @@ -26765,12 +29682,11 @@ def test_get_phrase_matcher_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_get_phrase_matcher_rest_bad_request( +def test_delete_phrase_matcher_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.GetPhraseMatcherRequest, + request_type=contact_center_insights.DeletePhraseMatcherRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -26790,10 +29706,10 @@ def test_get_phrase_matcher_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_phrase_matcher(request) + client.delete_phrase_matcher(request) -def test_get_phrase_matcher_rest_flattened(): +def 
test_delete_phrase_matcher_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26802,7 +29718,7 @@ def test_get_phrase_matcher_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { @@ -26818,13 +29734,11 @@ def test_get_phrase_matcher_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_phrase_matcher(**mock_args) + client.delete_phrase_matcher(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -26837,7 +29751,7 @@ def test_get_phrase_matcher_rest_flattened(): ) -def test_get_phrase_matcher_rest_flattened_error(transport: str = "rest"): +def test_delete_phrase_matcher_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26846,61 +29760,168 @@ def test_get_phrase_matcher_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_phrase_matcher( - contact_center_insights.GetPhraseMatcherRequest(), + client.delete_phrase_matcher( + contact_center_insights.DeletePhraseMatcherRequest(), name="name_value", ) -def test_get_phrase_matcher_rest_error(): +def test_delete_phrase_matcher_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.ListPhraseMatchersRequest, - dict, - ], -) -def test_list_phrase_matchers_rest(request_type): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.UpdatePhraseMatcherRequest, + dict, + ], +) +def test_update_phrase_matcher_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "phrase_matcher": { + "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" + } + } + request_init["phrase_matcher"] = { + "name": "projects/sample1/locations/sample2/phraseMatchers/sample3", + "revision_id": "revision_id_value", + "version_tag": "version_tag_value", + "revision_create_time": {"seconds": 751, "nanos": 543}, + "display_name": "display_name_value", + "type_": 1, + "active": True, + "phrase_match_rule_groups": [ + { + "type_": 1, + "phrase_match_rules": [ + { + "query": "query_value", + "negated": True, + "config": {"exact_match_config": {"case_sensitive": True}}, + } + ], + } + ], + "activation_update_time": {}, + "role_match": 1, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.UpdatePhraseMatcherRequest.meta.fields[ + "phrase_matcher" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["phrase_matcher"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + 
"is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["phrase_matcher"][field])): + del request_init["phrase_matcher"][field][i][subfield] + else: + del request_init["phrase_matcher"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListPhraseMatchersResponse( - next_page_token="next_page_token_value", + return_value = resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.ListPhraseMatchersResponse.pb( - return_value - ) + return_value = resources.PhraseMatcher.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_phrase_matchers(request) + response = client.update_phrase_matcher(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListPhraseMatchersPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT -def test_list_phrase_matchers_rest_use_cached_wrapped_rpc(): +def test_update_phrase_matcher_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -26915,7 +29936,8 @@ def test_list_phrase_matchers_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_phrase_matchers in client._transport._wrapped_methods + client._transport.update_phrase_matcher + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -26924,29 +29946,28 @@ def test_list_phrase_matchers_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_phrase_matchers + client._transport.update_phrase_matcher ] = mock_rpc request = {} - client.list_phrase_matchers(request) + client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_phrase_matchers(request) + client.update_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_phrase_matchers_rest_required_fields( - request_type=contact_center_insights.ListPhraseMatchersRequest, +def test_update_phrase_matcher_rest_required_fields( + request_type=contact_center_insights.UpdatePhraseMatcherRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -26957,29 +29978,19 @@ def test_list_phrase_matchers_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_phrase_matchers._get_unset_required_fields(jsonified_request) + ).update_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_phrase_matchers._get_unset_required_fields(jsonified_request) + ).update_phrase_matcher._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -26988,7 +29999,7 @@ def test_list_phrase_matchers_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListPhraseMatchersResponse() + return_value = resources.PhraseMatcher() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -27000,50 +30011,40 @@ def test_list_phrase_matchers_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.ListPhraseMatchersResponse.pb( - return_value - ) + return_value = resources.PhraseMatcher.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_phrase_matchers(request) + response = client.update_phrase_matcher(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_phrase_matchers_rest_unset_required_fields(): +def test_update_phrase_matcher_rest_unset_required_fields(): transport 
= transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_phrase_matchers._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.update_phrase_matcher._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("phraseMatcher",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_phrase_matchers_rest_interceptors(null_interceptor): +def test_update_phrase_matcher_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -27056,14 +30057,14 @@ def test_list_phrase_matchers_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_list_phrase_matchers" + transports.ContactCenterInsightsRestInterceptor, "post_update_phrase_matcher" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_list_phrase_matchers" + transports.ContactCenterInsightsRestInterceptor, "pre_update_phrase_matcher" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.ListPhraseMatchersRequest.pb( - contact_center_insights.ListPhraseMatchersRequest() + pb_message = contact_center_insights.UpdatePhraseMatcherRequest.pb( + contact_center_insights.UpdatePhraseMatcherRequest() ) transcode.return_value = { "method": "post", @@ -27075,21 +30076,19 @@ def test_list_phrase_matchers_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - contact_center_insights.ListPhraseMatchersResponse.to_json( - 
contact_center_insights.ListPhraseMatchersResponse() - ) + req.return_value._content = resources.PhraseMatcher.to_json( + resources.PhraseMatcher() ) - request = contact_center_insights.ListPhraseMatchersRequest() + request = contact_center_insights.UpdatePhraseMatcherRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.ListPhraseMatchersResponse() + post.return_value = resources.PhraseMatcher() - client.list_phrase_matchers( + client.update_phrase_matcher( request, metadata=[ ("key", "val"), @@ -27101,9 +30100,9 @@ def test_list_phrase_matchers_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_phrase_matchers_rest_bad_request( +def test_update_phrase_matcher_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.ListPhraseMatchersRequest, + request_type=contact_center_insights.UpdatePhraseMatcherRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -27111,7 +30110,11 @@ def test_list_phrase_matchers_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "phrase_matcher": { + "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -27123,10 +30126,10 @@ def test_list_phrase_matchers_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_phrase_matchers(request) + client.update_phrase_matcher(request) -def test_list_phrase_matchers_rest_flattened(): +def test_update_phrase_matcher_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -27135,14 +30138,19 @@ def test_list_phrase_matchers_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListPhraseMatchersResponse() + return_value = resources.PhraseMatcher() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "phrase_matcher": { + "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -27150,141 +30158,89 @@ def test_list_phrase_matchers_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.ListPhraseMatchersResponse.pb( - return_value - ) + return_value = resources.PhraseMatcher.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_phrase_matchers(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/phraseMatchers" - % client.transport._host, - args[1], - ) - - -def test_list_phrase_matchers_rest_flattened_error(transport: str = "rest"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_phrase_matchers( - contact_center_insights.ListPhraseMatchersRequest(), - parent="parent_value", - ) - - -def test_list_phrase_matchers_rest_pager(transport: str = "rest"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - next_page_token="abc", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[], - next_page_token="def", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - ), - ) - # Two responses for two calls - response = response + response + req.return_value = response_value - # Wrap the values into proper Response objs - response = tuple( - contact_center_insights.ListPhraseMatchersResponse.to_json(x) - for x in response + client.update_phrase_matcher(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{phrase_matcher.name=projects/*/locations/*/phraseMatchers/*}" + % client.transport._host, + args[1], ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} - pager = client.list_phrase_matchers(request=sample_request) +def test_update_phrase_matcher_rest_flattened_error(transport: str = "rest"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.PhraseMatcher) for i in results) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_phrase_matcher( + contact_center_insights.UpdatePhraseMatcherRequest(), + phrase_matcher=resources.PhraseMatcher(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) - pages = list(client.list_phrase_matchers(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + +def test_update_phrase_matcher_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeletePhraseMatcherRequest, + contact_center_insights.CalculateStatsRequest, dict, ], ) -def test_delete_phrase_matcher_rest(request_type): +def test_calculate_stats_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy 
transcoding - request_init = {"name": "projects/sample1/locations/sample2/phraseMatchers/sample3"} + request_init = {"location": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = contact_center_insights.CalculateStatsResponse( + average_turn_count=1931, + conversation_count=1955, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = contact_center_insights.CalculateStatsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_phrase_matcher(request) + response = client.calculate_stats(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, contact_center_insights.CalculateStatsResponse) + assert response.average_turn_count == 1931 + assert response.conversation_count == 1955 -def test_delete_phrase_matcher_rest_use_cached_wrapped_rpc(): +def test_calculate_stats_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -27298,40 +30254,35 @@ def test_delete_phrase_matcher_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_phrase_matcher - in client._transport._wrapped_methods - ) + assert client._transport.calculate_stats in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_phrase_matcher - ] = mock_rpc + client._transport._wrapped_methods[client._transport.calculate_stats] = mock_rpc request = {} - client.delete_phrase_matcher(request) + client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_phrase_matcher(request) + client.calculate_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_phrase_matcher_rest_required_fields( - request_type=contact_center_insights.DeletePhraseMatcherRequest, +def test_calculate_stats_rest_required_fields( + request_type=contact_center_insights.CalculateStatsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" + request_init["location"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -27342,21 +30293,23 @@ def test_delete_phrase_matcher_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_phrase_matcher._get_unset_required_fields(jsonified_request) + ).calculate_stats._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["location"] = "location_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_phrase_matcher._get_unset_required_fields(jsonified_request) + ).calculate_stats._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "location" in jsonified_request + assert jsonified_request["location"] == "location_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -27365,7 +30318,7 @@ def test_delete_phrase_matcher_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = contact_center_insights.CalculateStatsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -27377,36 +30330,41 @@ def test_delete_phrase_matcher_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = contact_center_insights.CalculateStatsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_phrase_matcher(request) + response = client.calculate_stats(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_phrase_matcher_rest_unset_required_fields(): +def test_calculate_stats_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - 
unset_fields = transport.delete_phrase_matcher._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.calculate_stats._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter",)) & set(("location",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_phrase_matcher_rest_interceptors(null_interceptor): +def test_calculate_stats_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -27419,11 +30377,14 @@ def test_delete_phrase_matcher_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_delete_phrase_matcher" + transports.ContactCenterInsightsRestInterceptor, "post_calculate_stats" + ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "pre_calculate_stats" ) as pre: pre.assert_not_called() - pb_message = contact_center_insights.DeletePhraseMatcherRequest.pb( - contact_center_insights.DeletePhraseMatcherRequest() + post.assert_not_called() + pb_message = contact_center_insights.CalculateStatsRequest.pb( + contact_center_insights.CalculateStatsRequest() ) transcode.return_value = { "method": "post", @@ -27435,15 +30396,21 @@ def test_delete_phrase_matcher_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = ( + contact_center_insights.CalculateStatsResponse.to_json( + contact_center_insights.CalculateStatsResponse() + ) + ) - request = contact_center_insights.DeletePhraseMatcherRequest() + request = contact_center_insights.CalculateStatsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = 
contact_center_insights.CalculateStatsResponse() - client.delete_phrase_matcher( + client.calculate_stats( request, metadata=[ ("key", "val"), @@ -27452,11 +30419,11 @@ def test_delete_phrase_matcher_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_phrase_matcher_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.DeletePhraseMatcherRequest, +def test_calculate_stats_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.CalculateStatsRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -27464,7 +30431,7 @@ def test_delete_phrase_matcher_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/phraseMatchers/sample3"} + request_init = {"location": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -27476,10 +30443,10 @@ def test_delete_phrase_matcher_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_phrase_matcher(request) + client.calculate_stats(request) -def test_delete_phrase_matcher_rest_flattened(): +def test_calculate_stats_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -27488,40 +30455,40 @@ def test_delete_phrase_matcher_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = contact_center_insights.CalculateStatsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" - } + sample_request = {"location": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + location="location_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = contact_center_insights.CalculateStatsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_phrase_matcher(**mock_args) + client.calculate_stats(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/phraseMatchers/*}" + "%s/v1/{location=projects/*/locations/*}/conversations:calculateStats" % client.transport._host, args[1], ) -def test_delete_phrase_matcher_rest_flattened_error(transport: str = "rest"): +def test_calculate_stats_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -27530,168 +30497,61 @@ def test_delete_phrase_matcher_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_phrase_matcher( - contact_center_insights.DeletePhraseMatcherRequest(), - name="name_value", - ) - - -def test_delete_phrase_matcher_rest_error(): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.UpdatePhraseMatcherRequest, - dict, - ], -) -def test_update_phrase_matcher_rest(request_type): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "phrase_matcher": { - "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" - } - } - request_init["phrase_matcher"] = { - "name": "projects/sample1/locations/sample2/phraseMatchers/sample3", - "revision_id": "revision_id_value", - "version_tag": "version_tag_value", - "revision_create_time": {"seconds": 751, "nanos": 543}, - "display_name": "display_name_value", - "type_": 1, - "active": True, - "phrase_match_rule_groups": [ - { - "type_": 1, - "phrase_match_rules": [ - { - "query": "query_value", - "negated": True, - "config": {"exact_match_config": {"case_sensitive": True}}, - } - ], - } - ], - "activation_update_time": {}, - "role_match": 1, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.UpdatePhraseMatcherRequest.meta.fields[ - "phrase_matcher" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + client.calculate_stats( + contact_center_insights.CalculateStatsRequest(), + location="location_value", + ) - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["phrase_matcher"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value +def test_calculate_stats_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated 
= subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["phrase_matcher"][field])): - del request_init["phrase_matcher"][field][i][subfield] - else: - del request_init["phrase_matcher"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.GetSettingsRequest, + dict, + ], +) +def test_get_settings_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/settings"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher( + return_value = resources.Settings( name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + language_code="language_code_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) + return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_phrase_matcher(request) + response = client.get_settings(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.PhraseMatcher) + assert isinstance(response, resources.Settings) assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert response.language_code == "language_code_value" -def test_update_phrase_matcher_rest_use_cached_wrapped_rpc(): +def test_get_settings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -27705,39 +30565,35 @@ def test_update_phrase_matcher_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_phrase_matcher - in client._transport._wrapped_methods - ) + assert client._transport.get_settings in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_phrase_matcher - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc request = {} - client.update_phrase_matcher(request) + client.get_settings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_phrase_matcher(request) + client.get_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_phrase_matcher_rest_required_fields( - request_type=contact_center_insights.UpdatePhraseMatcherRequest, +def test_get_settings_rest_required_fields( + request_type=contact_center_insights.GetSettingsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -27748,19 +30604,21 @@ def test_update_phrase_matcher_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_phrase_matcher._get_unset_required_fields(jsonified_request) + ).get_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_phrase_matcher._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + ).get_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -27769,7 +30627,7 @@ def test_update_phrase_matcher_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = resources.PhraseMatcher() + return_value = resources.Settings() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -27781,40 +30639,39 @@ def test_update_phrase_matcher_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) + return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_phrase_matcher(request) + response = client.get_settings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_phrase_matcher_rest_unset_required_fields(): +def test_get_settings_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_phrase_matcher._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("phraseMatcher",))) + unset_fields = transport.get_settings._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_phrase_matcher_rest_interceptors(null_interceptor): +def test_get_settings_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), 
interceptor=None @@ -27827,14 +30684,14 @@ def test_update_phrase_matcher_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_update_phrase_matcher" + transports.ContactCenterInsightsRestInterceptor, "post_get_settings" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_update_phrase_matcher" + transports.ContactCenterInsightsRestInterceptor, "pre_get_settings" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.UpdatePhraseMatcherRequest.pb( - contact_center_insights.UpdatePhraseMatcherRequest() + pb_message = contact_center_insights.GetSettingsRequest.pb( + contact_center_insights.GetSettingsRequest() ) transcode.return_value = { "method": "post", @@ -27846,19 +30703,17 @@ def test_update_phrase_matcher_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.PhraseMatcher.to_json( - resources.PhraseMatcher() - ) + req.return_value._content = resources.Settings.to_json(resources.Settings()) - request = contact_center_insights.UpdatePhraseMatcherRequest() + request = contact_center_insights.GetSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.PhraseMatcher() + post.return_value = resources.Settings() - client.update_phrase_matcher( + client.get_settings( request, metadata=[ ("key", "val"), @@ -27870,9 +30725,8 @@ def test_update_phrase_matcher_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_phrase_matcher_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.UpdatePhraseMatcherRequest, +def test_get_settings_rest_bad_request( + transport: str = "rest", 
request_type=contact_center_insights.GetSettingsRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -27880,11 +30734,7 @@ def test_update_phrase_matcher_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "phrase_matcher": { - "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" - } - } + request_init = {"name": "projects/sample1/locations/sample2/settings"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -27896,10 +30746,10 @@ def test_update_phrase_matcher_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_phrase_matcher(request) + client.get_settings(request) -def test_update_phrase_matcher_rest_flattened(): +def test_get_settings_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -27908,19 +30758,14 @@ def test_update_phrase_matcher_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.PhraseMatcher() + return_value = resources.Settings() # get arguments that satisfy an http rule for this method - sample_request = { - "phrase_matcher": { - "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" - } - } + sample_request = {"name": "projects/sample1/locations/sample2/settings"} # get truthy value for each flattened field mock_args = dict( - phrase_matcher=resources.PhraseMatcher(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -27928,89 +30773,187 @@ def test_update_phrase_matcher_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) + return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_phrase_matcher(**mock_args) + client.get_settings(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{phrase_matcher.name=projects/*/locations/*/phraseMatchers/*}" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/settings}" % client.transport._host, args[1], ) -def test_update_phrase_matcher_rest_flattened_error(transport: str = "rest"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_get_settings_rest_flattened_error(transport: str = "rest"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_settings( + contact_center_insights.GetSettingsRequest(), + name="name_value", + ) + + +def test_get_settings_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.UpdateSettingsRequest, + dict, + ], +) +def test_update_settings_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"settings": {"name": "projects/sample1/locations/sample2/settings"}} + request_init["settings"] = { + "name": "projects/sample1/locations/sample2/settings", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "language_code": "language_code_value", + "conversation_ttl": {"seconds": 751, "nanos": 543}, + "pubsub_notification_settings": {}, + "analysis_config": { + "runtime_integration_analysis_percentage": 0.4167, + "upload_conversation_analysis_percentage": 0.41590000000000005, + "annotator_selector": { + "run_interruption_annotator": True, + "run_silence_annotator": True, + "run_phrase_matcher_annotator": True, + "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], + "run_sentiment_annotator": True, + "run_entity_annotator": True, + "run_intent_annotator": True, + "run_issue_model_annotator": True, + "issue_models": ["issue_models_value1", "issue_models_value2"], + "run_summarization_annotator": True, + "summarization_config": { + "conversation_profile": "conversation_profile_value", + "summarization_model": 1, + }, + }, + }, + "redaction_config": { + "deidentify_template": "deidentify_template_value", + "inspect_template": "inspect_template_value", + }, + "speech_config": {"speech_recognizer": "speech_recognizer_value"}, + } + # The version of a generated dependency at test runtime may differ from the 
version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.UpdateSettingsRequest.meta.fields["settings"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_phrase_matcher( - contact_center_insights.UpdatePhraseMatcherRequest(), - phrase_matcher=resources.PhraseMatcher(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -def test_update_phrase_matcher_rest_error(): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["settings"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields 
+ if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.CalculateStatsRequest, - dict, - ], -) -def test_calculate_stats_rest(request_type): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = {"location": "projects/sample1/locations/sample2"} + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["settings"][field])): + del request_init["settings"][field][i][subfield] + else: + del request_init["settings"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.CalculateStatsResponse( - average_turn_count=1931, - conversation_count=1955, + return_value = resources.Settings( + name="name_value", + language_code="language_code_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.CalculateStatsResponse.pb(return_value) + return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.calculate_stats(request) + response = client.update_settings(request) # Establish that the response is the type that we expect. - assert isinstance(response, contact_center_insights.CalculateStatsResponse) - assert response.average_turn_count == 1931 - assert response.conversation_count == 1955 + assert isinstance(response, resources.Settings) + assert response.name == "name_value" + assert response.language_code == "language_code_value" -def test_calculate_stats_rest_use_cached_wrapped_rpc(): +def test_update_settings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -28024,35 +30967,34 @@ def test_calculate_stats_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.calculate_stats in client._transport._wrapped_methods + assert client._transport.update_settings in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.calculate_stats] = mock_rpc + client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc request = {} - client.calculate_stats(request) + client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.calculate_stats(request) + client.update_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_calculate_stats_rest_required_fields( - request_type=contact_center_insights.CalculateStatsRequest, +def test_update_settings_rest_required_fields( + request_type=contact_center_insights.UpdateSettingsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["location"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -28063,23 +31005,19 @@ def test_calculate_stats_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).calculate_stats._get_unset_required_fields(jsonified_request) + ).update_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["location"] = "location_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).calculate_stats._get_unset_required_fields(jsonified_request) + ).update_settings._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter",)) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "location" in jsonified_request - assert jsonified_request["location"] == "location_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28088,7 +31026,7 @@ def test_calculate_stats_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = contact_center_insights.CalculateStatsResponse() + return_value = resources.Settings() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -28100,41 +31038,48 @@ def test_calculate_stats_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.CalculateStatsResponse.pb( - return_value - ) + return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.calculate_stats(request) + response = client.update_settings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_calculate_stats_rest_unset_required_fields(): +def test_update_settings_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.calculate_stats._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter",)) & set(("location",))) + unset_fields = transport.update_settings._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "settings", + "updateMask", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_calculate_stats_rest_interceptors(null_interceptor): +def test_update_settings_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -28147,14 +31092,14 @@ def test_calculate_stats_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_calculate_stats" + transports.ContactCenterInsightsRestInterceptor, "post_update_settings" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_calculate_stats" + transports.ContactCenterInsightsRestInterceptor, "pre_update_settings" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.CalculateStatsRequest.pb( - contact_center_insights.CalculateStatsRequest() + pb_message = contact_center_insights.UpdateSettingsRequest.pb( + contact_center_insights.UpdateSettingsRequest() ) transcode.return_value = { "method": "post", @@ -28166,21 +31111,17 @@ def test_calculate_stats_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - contact_center_insights.CalculateStatsResponse.to_json( - contact_center_insights.CalculateStatsResponse() - ) - ) + req.return_value._content = resources.Settings.to_json(resources.Settings()) - request = 
contact_center_insights.CalculateStatsRequest() + request = contact_center_insights.UpdateSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.CalculateStatsResponse() + post.return_value = resources.Settings() - client.calculate_stats( + client.update_settings( request, metadata=[ ("key", "val"), @@ -28192,8 +31133,8 @@ def test_calculate_stats_rest_interceptors(null_interceptor): post.assert_called_once() -def test_calculate_stats_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.CalculateStatsRequest +def test_update_settings_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.UpdateSettingsRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28201,7 +31142,7 @@ def test_calculate_stats_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"location": "projects/sample1/locations/sample2"} + request_init = {"settings": {"name": "projects/sample1/locations/sample2/settings"}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -28213,10 +31154,10 @@ def test_calculate_stats_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.calculate_stats(request) + client.update_settings(request) -def test_calculate_stats_rest_flattened(): +def test_update_settings_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -28225,14 +31166,17 @@ def test_calculate_stats_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.CalculateStatsResponse() + return_value = resources.Settings() # get arguments that satisfy an http rule for this method - sample_request = {"location": "projects/sample1/locations/sample2"} + sample_request = { + "settings": {"name": "projects/sample1/locations/sample2/settings"} + } # get truthy value for each flattened field mock_args = dict( - location="location_value", + settings=resources.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -28240,25 +31184,25 @@ def test_calculate_stats_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.CalculateStatsResponse.pb(return_value) + return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.calculate_stats(**mock_args) + client.update_settings(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{location=projects/*/locations/*}/conversations:calculateStats" + "%s/v1/{settings.name=projects/*/locations/*/settings}" % client.transport._host, args[1], ) -def test_calculate_stats_rest_flattened_error(transport: str = "rest"): +def test_update_settings_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -28267,13 +31211,14 @@ def test_calculate_stats_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.calculate_stats( - contact_center_insights.CalculateStatsRequest(), - location="location_value", + client.update_settings( + contact_center_insights.UpdateSettingsRequest(), + settings=resources.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_calculate_stats_rest_error(): +def test_update_settings_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -28282,46 +31227,46 @@ def test_calculate_stats_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetSettingsRequest, + contact_center_insights.GetEncryptionSpecRequest, dict, ], ) -def test_get_settings_rest(request_type): +def test_get_encryption_spec_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/settings"} + request_init = {"name": "projects/sample1/locations/sample2/encryptionSpec"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Settings( + return_value = resources.EncryptionSpec( name="name_value", - language_code="language_code_value", + kms_key="kms_key_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Settings.pb(return_value) + return_value = resources.EncryptionSpec.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_settings(request) + response = client.get_encryption_spec(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Settings) + assert isinstance(response, resources.EncryptionSpec) assert response.name == "name_value" - assert response.language_code == "language_code_value" + assert response.kms_key == "kms_key_value" -def test_get_settings_rest_use_cached_wrapped_rpc(): +def test_get_encryption_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -28335,30 +31280,34 @@ def test_get_settings_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_settings in client._transport._wrapped_methods + assert ( + client._transport.get_encryption_spec in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_encryption_spec + ] = mock_rpc request = {} - client.get_settings(request) + client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_settings(request) + client.get_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_settings_rest_required_fields( - request_type=contact_center_insights.GetSettingsRequest, +def test_get_encryption_spec_rest_required_fields( + request_type=contact_center_insights.GetEncryptionSpecRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -28374,7 +31323,7 @@ def test_get_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_settings._get_unset_required_fields(jsonified_request) + ).get_encryption_spec._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -28383,7 +31332,7 @@ def test_get_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_settings._get_unset_required_fields(jsonified_request) + ).get_encryption_spec._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -28397,7 +31346,7 @@ def test_get_settings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Settings() + return_value = resources.EncryptionSpec() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -28418,30 +31367,30 @@ def test_get_settings_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Settings.pb(return_value) + return_value = resources.EncryptionSpec.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_settings(request) + response = client.get_encryption_spec(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_settings_rest_unset_required_fields(): +def test_get_encryption_spec_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_settings._get_unset_required_fields({}) + unset_fields = transport.get_encryption_spec._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_settings_rest_interceptors(null_interceptor): +def test_get_encryption_spec_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -28454,14 +31403,14 @@ def test_get_settings_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_get_settings" + transports.ContactCenterInsightsRestInterceptor, "post_get_encryption_spec" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_get_settings" + transports.ContactCenterInsightsRestInterceptor, "pre_get_encryption_spec" ) 
as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.GetSettingsRequest.pb( - contact_center_insights.GetSettingsRequest() + pb_message = contact_center_insights.GetEncryptionSpecRequest.pb( + contact_center_insights.GetEncryptionSpecRequest() ) transcode.return_value = { "method": "post", @@ -28473,17 +31422,19 @@ def test_get_settings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Settings.to_json(resources.Settings()) + req.return_value._content = resources.EncryptionSpec.to_json( + resources.EncryptionSpec() + ) - request = contact_center_insights.GetSettingsRequest() + request = contact_center_insights.GetEncryptionSpecRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Settings() + post.return_value = resources.EncryptionSpec() - client.get_settings( + client.get_encryption_spec( request, metadata=[ ("key", "val"), @@ -28495,8 +31446,9 @@ def test_get_settings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_settings_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.GetSettingsRequest +def test_get_encryption_spec_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.GetEncryptionSpecRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28504,7 +31456,7 @@ def test_get_settings_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/settings"} + request_init = {"name": "projects/sample1/locations/sample2/encryptionSpec"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -28516,10 +31468,10 @@ def test_get_settings_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_settings(request) + client.get_encryption_spec(request) -def test_get_settings_rest_flattened(): +def test_get_encryption_spec_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -28528,10 +31480,10 @@ def test_get_settings_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Settings() + return_value = resources.EncryptionSpec() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/settings"} + sample_request = {"name": "projects/sample1/locations/sample2/encryptionSpec"} # get truthy value for each flattened field mock_args = dict( @@ -28543,24 +31495,25 @@ def test_get_settings_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Settings.pb(return_value) + return_value = resources.EncryptionSpec.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_settings(**mock_args) + client.get_encryption_spec(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/settings}" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/encryptionSpec}" + % client.transport._host, args[1], ) -def test_get_settings_rest_flattened_error(transport: str = "rest"): +def test_get_encryption_spec_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -28569,13 +31522,13 @@ def test_get_settings_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_settings( - contact_center_insights.GetSettingsRequest(), + client.get_encryption_spec( + contact_center_insights.GetEncryptionSpecRequest(), name="name_value", ) -def test_get_settings_rest_error(): +def test_get_encryption_spec_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -28584,146 +31537,41 @@ def test_get_settings_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UpdateSettingsRequest, + contact_center_insights.InitializeEncryptionSpecRequest, dict, ], ) -def test_update_settings_rest(request_type): +def test_initialize_encryption_spec_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"settings": {"name": "projects/sample1/locations/sample2/settings"}} - request_init["settings"] = { - "name": "projects/sample1/locations/sample2/settings", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "language_code": "language_code_value", - "conversation_ttl": {"seconds": 751, "nanos": 543}, - "pubsub_notification_settings": {}, - "analysis_config": { - 
"runtime_integration_analysis_percentage": 0.4167, - "upload_conversation_analysis_percentage": 0.41590000000000005, - "annotator_selector": { - "run_interruption_annotator": True, - "run_silence_annotator": True, - "run_phrase_matcher_annotator": True, - "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], - "run_sentiment_annotator": True, - "run_entity_annotator": True, - "run_intent_annotator": True, - "run_issue_model_annotator": True, - "issue_models": ["issue_models_value1", "issue_models_value2"], - "run_summarization_annotator": True, - "summarization_config": { - "conversation_profile": "conversation_profile_value", - "summarization_model": 1, - }, - }, - }, - "redaction_config": { - "deidentify_template": "deidentify_template_value", - "inspect_template": "inspect_template_value", - }, - "speech_config": {"speech_recognizer": "speech_recognizer_value"}, + request_init = { + "encryption_spec": {"name": "projects/sample1/locations/sample2/encryptionSpec"} } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.UpdateSettingsRequest.meta.fields["settings"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["settings"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["settings"][field])): - del request_init["settings"][field][i][subfield] - else: - del 
request_init["settings"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Settings( - name="name_value", - language_code="language_code_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_settings(request) + response = client.initialize_encryption_spec(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Settings) - assert response.name == "name_value" - assert response.language_code == "language_code_value" + assert response.operation.name == "operations/spam" -def test_update_settings_rest_use_cached_wrapped_rpc(): +def test_initialize_encryption_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -28737,30 +31585,39 @@ def test_update_settings_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_settings in client._transport._wrapped_methods + assert ( + client._transport.initialize_encryption_spec + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc + client._transport._wrapped_methods[ + client._transport.initialize_encryption_spec + ] = mock_rpc request = {} - client.update_settings(request) + client.initialize_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_settings(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.initialize_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_settings_rest_required_fields( - request_type=contact_center_insights.UpdateSettingsRequest, +def test_initialize_encryption_spec_rest_required_fields( + request_type=contact_center_insights.InitializeEncryptionSpecRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -28775,16 +31632,14 @@ def test_update_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_settings._get_unset_required_fields(jsonified_request) + ).initialize_encryption_spec._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_settings._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).initialize_encryption_spec._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -28796,7 +31651,7 @@ def test_update_settings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Settings() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -28808,7 +31663,7 @@ def test_update_settings_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -28816,40 +31671,29 @@ def test_update_settings_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_settings(request) + response = client.initialize_encryption_spec(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_settings_rest_unset_required_fields(): +def test_initialize_encryption_spec_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_settings._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "settings", - "updateMask", - ) - ) - 
) + unset_fields = transport.initialize_encryption_spec._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("encryptionSpec",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_settings_rest_interceptors(null_interceptor): +def test_initialize_encryption_spec_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -28862,14 +31706,18 @@ def test_update_settings_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_update_settings" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_initialize_encryption_spec", ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_update_settings" + transports.ContactCenterInsightsRestInterceptor, + "pre_initialize_encryption_spec", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.UpdateSettingsRequest.pb( - contact_center_insights.UpdateSettingsRequest() + pb_message = contact_center_insights.InitializeEncryptionSpecRequest.pb( + contact_center_insights.InitializeEncryptionSpecRequest() ) transcode.return_value = { "method": "post", @@ -28881,17 +31729,19 @@ def test_update_settings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Settings.to_json(resources.Settings()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = contact_center_insights.UpdateSettingsRequest() + request = contact_center_insights.InitializeEncryptionSpecRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] 
pre.return_value = request, metadata - post.return_value = resources.Settings() + post.return_value = operations_pb2.Operation() - client.update_settings( + client.initialize_encryption_spec( request, metadata=[ ("key", "val"), @@ -28903,8 +31753,9 @@ def test_update_settings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_settings_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.UpdateSettingsRequest +def test_initialize_encryption_spec_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.InitializeEncryptionSpecRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28912,7 +31763,9 @@ def test_update_settings_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"settings": {"name": "projects/sample1/locations/sample2/settings"}} + request_init = { + "encryption_spec": {"name": "projects/sample1/locations/sample2/encryptionSpec"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -28924,10 +31777,10 @@ def test_update_settings_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_settings(request) + client.initialize_encryption_spec(request) -def test_update_settings_rest_flattened(): +def test_initialize_encryption_spec_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -28936,43 +31789,42 @@ def test_update_settings_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Settings() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "settings": {"name": "projects/sample1/locations/sample2/settings"} + "encryption_spec": { + "name": "projects/sample1/locations/sample2/encryptionSpec" + } } # get truthy value for each flattened field mock_args = dict( - settings=resources.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + encryption_spec=resources.EncryptionSpec(name="name_value"), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_settings(**mock_args) + client.initialize_encryption_spec(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{settings.name=projects/*/locations/*/settings}" + "%s/v1/{encryption_spec.name=projects/*/locations/*/encryptionSpec}:initialize" % client.transport._host, args[1], ) -def test_update_settings_rest_flattened_error(transport: str = "rest"): +def test_initialize_encryption_spec_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -28981,14 +31833,13 @@ def test_update_settings_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_settings( - contact_center_insights.UpdateSettingsRequest(), - settings=resources.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.initialize_encryption_spec( + contact_center_insights.InitializeEncryptionSpecRequest(), + encryption_spec=resources.EncryptionSpec(name="name_value"), ) -def test_update_settings_rest_error(): +def test_initialize_encryption_spec_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -30889,6 +33740,8 @@ def test_contact_center_insights_base_transport(): "delete_issue_model", "deploy_issue_model", "undeploy_issue_model", + "export_issue_model", + "import_issue_model", "get_issue", "list_issues", "update_issue", @@ -30902,6 +33755,8 @@ def test_contact_center_insights_base_transport(): "calculate_stats", "get_settings", "update_settings", + "get_encryption_spec", + "initialize_encryption_spec", "create_view", "get_view", "list_views", @@ -31257,6 +34112,12 @@ def test_contact_center_insights_client_transport_session_collision(transport_na session1 = client1.transport.undeploy_issue_model._session session2 = client2.transport.undeploy_issue_model._session assert session1 != session2 + session1 = client1.transport.export_issue_model._session + session2 = client2.transport.export_issue_model._session + assert session1 != session2 + session1 = client1.transport.import_issue_model._session + session2 = client2.transport.import_issue_model._session + assert session1 != session2 session1 = client1.transport.get_issue._session session2 = client2.transport.get_issue._session assert session1 != session2 @@ -31296,6 +34157,12 @@ def test_contact_center_insights_client_transport_session_collision(transport_na session1 = client1.transport.update_settings._session session2 = client2.transport.update_settings._session assert session1 != session2 + session1 = 
client1.transport.get_encryption_spec._session + session2 = client2.transport.get_encryption_spec._session + assert session1 != session2 + session1 = client1.transport.initialize_encryption_spec._session + session2 = client2.transport.initialize_encryption_spec._session + assert session1 != session2 session1 = client1.transport.create_view._session session2 = client2.transport.create_view._session assert session1 != session2 @@ -31562,11 +34429,34 @@ def test_parse_conversation_profile_path(): assert expected == actual -def test_issue_path(): +def test_encryption_spec_path(): project = "winkle" location = "nautilus" - issue_model = "scallop" - issue = "abalone" + expected = "projects/{project}/locations/{location}/encryptionSpec".format( + project=project, + location=location, + ) + actual = ContactCenterInsightsClient.encryption_spec_path(project, location) + assert expected == actual + + +def test_parse_encryption_spec_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ContactCenterInsightsClient.encryption_spec_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ContactCenterInsightsClient.parse_encryption_spec_path(path) + assert expected == actual + + +def test_issue_path(): + project = "squid" + location = "clam" + issue_model = "whelk" + issue = "octopus" expected = "projects/{project}/locations/{location}/issueModels/{issue_model}/issues/{issue}".format( project=project, location=location, @@ -31581,10 +34471,10 @@ def test_issue_path(): def test_parse_issue_path(): expected = { - "project": "squid", - "location": "clam", - "issue_model": "whelk", - "issue": "octopus", + "project": "oyster", + "location": "nudibranch", + "issue_model": "cuttlefish", + "issue": "mussel", } path = ContactCenterInsightsClient.issue_path(**expected) @@ -31594,9 +34484,9 @@ def test_parse_issue_path(): def test_issue_model_path(): - project = "oyster" - location = "nudibranch" - issue_model = "cuttlefish" + project = "winkle" + location = "nautilus" + issue_model = "scallop" expected = ( "projects/{project}/locations/{location}/issueModels/{issue_model}".format( project=project, @@ -31612,9 +34502,9 @@ def test_issue_model_path(): def test_parse_issue_model_path(): expected = { - "project": "mussel", - "location": "winkle", - "issue_model": "nautilus", + "project": "abalone", + "location": "squid", + "issue_model": "clam", } path = ContactCenterInsightsClient.issue_model_path(**expected) @@ -31624,9 +34514,9 @@ def test_parse_issue_model_path(): def test_participant_path(): - project = "scallop" - conversation = "abalone" - participant = "squid" + project = "whelk" + conversation = "octopus" + participant = "oyster" expected = "projects/{project}/conversations/{conversation}/participants/{participant}".format( project=project, conversation=conversation, @@ -31640,9 +34530,9 @@ def test_participant_path(): def test_parse_participant_path(): expected = { - "project": "clam", - "conversation": "whelk", - "participant": "octopus", + "project": "nudibranch", + "conversation": "cuttlefish", + "participant": "mussel", } path = 
ContactCenterInsightsClient.participant_path(**expected) @@ -31652,9 +34542,9 @@ def test_parse_participant_path(): def test_phrase_matcher_path(): - project = "oyster" - location = "nudibranch" - phrase_matcher = "cuttlefish" + project = "winkle" + location = "nautilus" + phrase_matcher = "scallop" expected = "projects/{project}/locations/{location}/phraseMatchers/{phrase_matcher}".format( project=project, location=location, @@ -31668,9 +34558,9 @@ def test_phrase_matcher_path(): def test_parse_phrase_matcher_path(): expected = { - "project": "mussel", - "location": "winkle", - "phrase_matcher": "nautilus", + "project": "abalone", + "location": "squid", + "phrase_matcher": "clam", } path = ContactCenterInsightsClient.phrase_matcher_path(**expected) @@ -31680,9 +34570,9 @@ def test_parse_phrase_matcher_path(): def test_recognizer_path(): - project = "scallop" - location = "abalone" - recognizer = "squid" + project = "whelk" + location = "octopus" + recognizer = "oyster" expected = ( "projects/{project}/locations/{location}/recognizers/{recognizer}".format( project=project, @@ -31696,9 +34586,9 @@ def test_recognizer_path(): def test_parse_recognizer_path(): expected = { - "project": "clam", - "location": "whelk", - "recognizer": "octopus", + "project": "nudibranch", + "location": "cuttlefish", + "recognizer": "mussel", } path = ContactCenterInsightsClient.recognizer_path(**expected) @@ -31708,8 +34598,8 @@ def test_parse_recognizer_path(): def test_settings_path(): - project = "oyster" - location = "nudibranch" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}/settings".format( project=project, location=location, @@ -31720,8 +34610,8 @@ def test_settings_path(): def test_parse_settings_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "scallop", + "location": "abalone", } path = ContactCenterInsightsClient.settings_path(**expected) @@ -31731,9 +34621,9 @@ def 
test_parse_settings_path(): def test_view_path(): - project = "winkle" - location = "nautilus" - view = "scallop" + project = "squid" + location = "clam" + view = "whelk" expected = "projects/{project}/locations/{location}/views/{view}".format( project=project, location=location, @@ -31745,9 +34635,9 @@ def test_view_path(): def test_parse_view_path(): expected = { - "project": "abalone", - "location": "squid", - "view": "clam", + "project": "octopus", + "location": "oyster", + "view": "nudibranch", } path = ContactCenterInsightsClient.view_path(**expected) @@ -31757,7 +34647,7 @@ def test_parse_view_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -31767,7 +34657,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "mussel", } path = ContactCenterInsightsClient.common_billing_account_path(**expected) @@ -31777,7 +34667,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -31787,7 +34677,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "nautilus", } path = ContactCenterInsightsClient.common_folder_path(**expected) @@ -31797,7 +34687,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "scallop" expected = "organizations/{organization}".format( organization=organization, ) @@ -31807,7 +34697,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "abalone", } path = ContactCenterInsightsClient.common_organization_path(**expected) @@ -31817,7 +34707,7 @@ def 
test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -31827,7 +34717,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "clam", } path = ContactCenterInsightsClient.common_project_path(**expected) @@ -31837,8 +34727,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "whelk" + location = "octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -31849,8 +34739,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "oyster", + "location": "nudibranch", } path = ContactCenterInsightsClient.common_location_path(**expected) From c859d14990dbdf2c59a09265b1c91479f134aaa6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 11:16:35 -0400 Subject: [PATCH 56/59] feat: [google-shopping-merchant-datasources] adding some more information about supplemental data sources (#13108) - [ ] Regenerate this pull request now. 
BEGIN_COMMIT_OVERRIDE feat: adding some more information about supplemental data sources feat: Add FileUploads service docs: A comment for enum value `PRODUCTS` in enum `Channel` is changed END_COMMIT_OVERRIDE PiperOrigin-RevId: 681900944 Source-Link: https://github.com/googleapis/googleapis/commit/78d1fb208e4af3022a0aaf27bafa578ff326326e Source-Link: https://github.com/googleapis/googleapis-gen/commit/19badfac8741859ca06f191977455b370e84e14b Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLW1lcmNoYW50LWRhdGFzb3VyY2VzLy5Pd2xCb3QueWFtbCIsImgiOiIxOWJhZGZhYzg3NDE4NTljYTA2ZjE5MTk3NzQ1NWIzNzBlODRlMTRiIn0= BEGIN_NESTED_COMMIT feat: [google-shopping-merchant-datasources] Add FileUploads service docs: A comment for enum value `PRODUCTS` in enum `Channel` is changed PiperOrigin-RevId: 678641097 Source-Link: https://github.com/googleapis/googleapis/commit/9c4c174205d923f9490d534c6e54c2d18ddc9d8f Source-Link: https://github.com/googleapis/googleapis-gen/commit/13cfe504a6f0aedc0416c23851f278157834f905 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLW1lcmNoYW50LWRhdGFzb3VyY2VzLy5Pd2xCb3QueWFtbCIsImgiOiIxM2NmZTUwNGE2ZjBhZWRjMDQxNmMyMzg1MWYyNzgxNTc4MzRmOTA1In0= END_NESTED_COMMIT --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr Co-authored-by: Anthonios Partheniou --- .../file_uploads_service.rst | 6 + .../merchant_datasources_v1beta/services_.rst | 1 + .../shopping/merchant_datasources/__init__.py | 16 + .../merchant_datasources_v1beta/__init__.py | 11 + .../gapic_metadata.json | 34 + .../services/file_uploads_service/__init__.py | 22 + .../file_uploads_service/async_client.py | 389 +++ .../services/file_uploads_service/client.py | 815 +++++ .../transports/__init__.py | 38 + .../file_uploads_service/transports/base.py | 161 + .../file_uploads_service/transports/grpc.py | 272 ++ .../transports/grpc_asyncio.py | 284 ++ .../file_uploads_service/transports/rest.py | 309 ++ .../types/__init__.py | 5 + .../types/datasourcetypes.py | 118 +- .../types/fileuploads.py | 205 ++ 
...e_uploads_service_get_file_upload_async.py | 52 + ...le_uploads_service_get_file_upload_sync.py | 52 + ....shopping.merchant.datasources.v1beta.json | 161 + ...up_merchant_datasources_v1beta_keywords.py | 1 + .../test_data_sources_service.py | 20 + .../test_file_uploads_service.py | 2632 +++++++++++++++++ 22 files changed, 5603 insertions(+), 1 deletion(-) create mode 100644 packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/file_uploads_service.rst create mode 100644 packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/__init__.py create mode 100644 packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/async_client.py create mode 100644 packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/client.py create mode 100644 packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/__init__.py create mode 100644 packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/base.py create mode 100644 packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/grpc.py create mode 100644 packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/grpc_asyncio.py create mode 100644 packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/rest.py create mode 100644 packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/fileuploads.py create mode 100644 
packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_async.py create mode 100644 packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_sync.py create mode 100644 packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_file_uploads_service.py diff --git a/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/file_uploads_service.rst b/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/file_uploads_service.rst new file mode 100644 index 000000000000..2cb7cfc2bd21 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/file_uploads_service.rst @@ -0,0 +1,6 @@ +FileUploadsService +------------------------------------ + +.. automodule:: google.shopping.merchant_datasources_v1beta.services.file_uploads_service + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/services_.rst b/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/services_.rst index e81d9b15d547..c306312a77f1 100644 --- a/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/services_.rst +++ b/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/services_.rst @@ -4,3 +4,4 @@ Services for Google Shopping Merchant Datasources v1beta API :maxdepth: 2 data_sources_service + file_uploads_service diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/__init__.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/__init__.py index 5c29874728d6..d96be2f6dc0b 100644 --- a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/__init__.py +++ 
b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/__init__.py @@ -24,6 +24,12 @@ from google.shopping.merchant_datasources_v1beta.services.data_sources_service.client import ( DataSourcesServiceClient, ) +from google.shopping.merchant_datasources_v1beta.services.file_uploads_service.async_client import ( + FileUploadsServiceAsyncClient, +) +from google.shopping.merchant_datasources_v1beta.services.file_uploads_service.client import ( + FileUploadsServiceClient, +) from google.shopping.merchant_datasources_v1beta.types.datasources import ( CreateDataSourceRequest, DataSource, @@ -35,6 +41,7 @@ UpdateDataSourceRequest, ) from google.shopping.merchant_datasources_v1beta.types.datasourcetypes import ( + DataSourceReference, LocalInventoryDataSource, PrimaryProductDataSource, PromotionDataSource, @@ -42,10 +49,16 @@ SupplementalProductDataSource, ) from google.shopping.merchant_datasources_v1beta.types.fileinputs import FileInput +from google.shopping.merchant_datasources_v1beta.types.fileuploads import ( + FileUpload, + GetFileUploadRequest, +) __all__ = ( "DataSourcesServiceClient", "DataSourcesServiceAsyncClient", + "FileUploadsServiceClient", + "FileUploadsServiceAsyncClient", "CreateDataSourceRequest", "DataSource", "DeleteDataSourceRequest", @@ -54,10 +67,13 @@ "ListDataSourcesRequest", "ListDataSourcesResponse", "UpdateDataSourceRequest", + "DataSourceReference", "LocalInventoryDataSource", "PrimaryProductDataSource", "PromotionDataSource", "RegionalInventoryDataSource", "SupplementalProductDataSource", "FileInput", + "FileUpload", + "GetFileUploadRequest", ) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/__init__.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/__init__.py index 7819b598ee4a..5a8e3cd79467 100644 --- a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/__init__.py +++ 
b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/__init__.py @@ -22,6 +22,10 @@ DataSourcesServiceAsyncClient, DataSourcesServiceClient, ) +from .services.file_uploads_service import ( + FileUploadsServiceAsyncClient, + FileUploadsServiceClient, +) from .types.datasources import ( CreateDataSourceRequest, DataSource, @@ -33,6 +37,7 @@ UpdateDataSourceRequest, ) from .types.datasourcetypes import ( + DataSourceReference, LocalInventoryDataSource, PrimaryProductDataSource, PromotionDataSource, @@ -40,16 +45,22 @@ SupplementalProductDataSource, ) from .types.fileinputs import FileInput +from .types.fileuploads import FileUpload, GetFileUploadRequest __all__ = ( "DataSourcesServiceAsyncClient", + "FileUploadsServiceAsyncClient", "CreateDataSourceRequest", "DataSource", + "DataSourceReference", "DataSourcesServiceClient", "DeleteDataSourceRequest", "FetchDataSourceRequest", "FileInput", + "FileUpload", + "FileUploadsServiceClient", "GetDataSourceRequest", + "GetFileUploadRequest", "ListDataSourcesRequest", "ListDataSourcesResponse", "LocalInventoryDataSource", diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/gapic_metadata.json b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/gapic_metadata.json index 22bf4c71ef65..ec4728128d9c 100644 --- a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/gapic_metadata.json +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/gapic_metadata.json @@ -113,6 +113,40 @@ } } } + }, + "FileUploadsService": { + "clients": { + "grpc": { + "libraryClient": "FileUploadsServiceClient", + "rpcs": { + "GetFileUpload": { + "methods": [ + "get_file_upload" + ] + } + } + }, + "grpc-async": { + "libraryClient": "FileUploadsServiceAsyncClient", + "rpcs": { + "GetFileUpload": { + "methods": [ + "get_file_upload" + ] + } + } + }, + 
"rest": { + "libraryClient": "FileUploadsServiceClient", + "rpcs": { + "GetFileUpload": { + "methods": [ + "get_file_upload" + ] + } + } + } + } } } } diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/__init__.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/__init__.py new file mode 100644 index 000000000000..0adf352dc7ad --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import FileUploadsServiceAsyncClient +from .client import FileUploadsServiceClient + +__all__ = ( + "FileUploadsServiceClient", + "FileUploadsServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/async_client.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/async_client.py new file mode 100644 index 000000000000..909683366907 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/async_client.py @@ -0,0 +1,389 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_datasources_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + +from google.shopping.merchant_datasources_v1beta.types import fileuploads + +from .client import FileUploadsServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, FileUploadsServiceTransport +from .transports.grpc_asyncio import FileUploadsServiceGrpcAsyncIOTransport + + +class FileUploadsServiceAsyncClient: + """Service to manage data source file uploads.""" + + _client: FileUploadsServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = FileUploadsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = FileUploadsServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = FileUploadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = FileUploadsServiceClient._DEFAULT_UNIVERSE + + file_upload_path = staticmethod(FileUploadsServiceClient.file_upload_path) + parse_file_upload_path = staticmethod( + FileUploadsServiceClient.parse_file_upload_path + ) + common_billing_account_path = staticmethod( + FileUploadsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + FileUploadsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(FileUploadsServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + FileUploadsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + FileUploadsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + FileUploadsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(FileUploadsServiceClient.common_project_path) + parse_common_project_path = staticmethod( + FileUploadsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(FileUploadsServiceClient.common_location_path) + parse_common_location_path = staticmethod( + FileUploadsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FileUploadsServiceAsyncClient: The constructed client. 
+ """ + return FileUploadsServiceClient.from_service_account_info.__func__(FileUploadsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FileUploadsServiceAsyncClient: The constructed client. + """ + return FileUploadsServiceClient.from_service_account_file.__func__(FileUploadsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return FileUploadsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> FileUploadsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + FileUploadsServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = FileUploadsServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + FileUploadsServiceTransport, + Callable[..., FileUploadsServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the file uploads service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,FileUploadsServiceTransport,Callable[..., FileUploadsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the FileUploadsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = FileUploadsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_file_upload( + self, + request: Optional[Union[fileuploads.GetFileUploadRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> fileuploads.FileUpload: + r"""Gets the latest data source file upload. Only the ``latest`` + alias is accepted for a file upload. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_datasources_v1beta + + async def sample_get_file_upload(): + # Create a client + client = merchant_datasources_v1beta.FileUploadsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.GetFileUploadRequest( + name="name_value", + ) + + # Make the request + response = await client.get_file_upload(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_datasources_v1beta.types.GetFileUploadRequest, dict]]): + The request object. Request message for the + GetFileUploadRequest method. + name (:class:`str`): + Required. The name of the data source file upload to + retrieve. 
Format: + ``accounts/{account}/dataSources/{datasource}/fileUploads/latest`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_datasources_v1beta.types.FileUpload: + The file upload of a specific data + source, that is, the result of the + retrieval of the data source at a + certain timestamp computed + asynchronously when the data source + processing is finished. Only applicable + to file data sources. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, fileuploads.GetFileUploadRequest): + request = fileuploads.GetFileUploadRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_file_upload + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "FileUploadsServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("FileUploadsServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/client.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/client.py new file mode 100644 index 000000000000..45141cbec8cf --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/client.py @@ -0,0 +1,815 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_datasources_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + +from google.shopping.merchant_datasources_v1beta.types import fileuploads + +from .transports.base import DEFAULT_CLIENT_INFO, FileUploadsServiceTransport +from .transports.grpc import FileUploadsServiceGrpcTransport +from .transports.grpc_asyncio import FileUploadsServiceGrpcAsyncIOTransport +from .transports.rest import FileUploadsServiceRestTransport + + +class FileUploadsServiceClientMeta(type): + """Metaclass for the FileUploadsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[FileUploadsServiceTransport]] + _transport_registry["grpc"] = FileUploadsServiceGrpcTransport + _transport_registry["grpc_asyncio"] = FileUploadsServiceGrpcAsyncIOTransport + _transport_registry["rest"] = FileUploadsServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[FileUploadsServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class FileUploadsServiceClient(metaclass=FileUploadsServiceClientMeta): + """Service to manage data source file uploads.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = "merchantapi.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FileUploadsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FileUploadsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> FileUploadsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + FileUploadsServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def file_upload_path( + account: str, + datasource: str, + fileupload: str, + ) -> str: + """Returns a fully-qualified file_upload string.""" + return "accounts/{account}/dataSources/{datasource}/fileUploads/{fileupload}".format( + account=account, + datasource=datasource, + fileupload=fileupload, + ) + + @staticmethod + def parse_file_upload_path(path: str) -> Dict[str, str]: + """Parses a file_upload path into its component segments.""" + m = re.match( + r"^accounts/(?P.+?)/dataSources/(?P.+?)/fileUploads/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + 
project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. 
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. 
+ + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. 
If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = FileUploadsServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = FileUploadsServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = FileUploadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = FileUploadsServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = FileUploadsServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. 
+ """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or FileUploadsServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + FileUploadsServiceTransport, + Callable[..., FileUploadsServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the file uploads service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,FileUploadsServiceTransport,Callable[..., FileUploadsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the FileUploadsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = FileUploadsServiceClient._read_environment_variables() + self._client_cert_source = FileUploadsServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = FileUploadsServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, FileUploadsServiceTransport) + if transport_provided: + # transport is a FileUploadsServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(FileUploadsServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or FileUploadsServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[FileUploadsServiceTransport], + Callable[..., FileUploadsServiceTransport], + ] = ( + FileUploadsServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., FileUploadsServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_file_upload( + self, + request: Optional[Union[fileuploads.GetFileUploadRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> fileuploads.FileUpload: + r"""Gets the latest data source file upload. Only the ``latest`` + alias is accepted for a file upload. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_datasources_v1beta + + def sample_get_file_upload(): + # Create a client + client = merchant_datasources_v1beta.FileUploadsServiceClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.GetFileUploadRequest( + name="name_value", + ) + + # Make the request + response = client.get_file_upload(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_datasources_v1beta.types.GetFileUploadRequest, dict]): + The request object. Request message for the + GetFileUploadRequest method. + name (str): + Required. The name of the data source file upload to + retrieve. Format: + ``accounts/{account}/dataSources/{datasource}/fileUploads/latest`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_datasources_v1beta.types.FileUpload: + The file upload of a specific data + source, that is, the result of the + retrieval of the data source at a + certain timestamp computed + asynchronously when the data source + processing is finished. Only applicable + to file data sources. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, fileuploads.GetFileUploadRequest): + request = fileuploads.GetFileUploadRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_file_upload] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "FileUploadsServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("FileUploadsServiceClient",) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/__init__.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/__init__.py new file mode 100644 index 000000000000..c3db09f22723 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import FileUploadsServiceTransport +from .grpc import FileUploadsServiceGrpcTransport +from .grpc_asyncio import FileUploadsServiceGrpcAsyncIOTransport +from .rest import FileUploadsServiceRestInterceptor, FileUploadsServiceRestTransport + +# Compile a registry of transports. 
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[FileUploadsServiceTransport]] +_transport_registry["grpc"] = FileUploadsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = FileUploadsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = FileUploadsServiceRestTransport + +__all__ = ( + "FileUploadsServiceTransport", + "FileUploadsServiceGrpcTransport", + "FileUploadsServiceGrpcAsyncIOTransport", + "FileUploadsServiceRestTransport", + "FileUploadsServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/base.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/base.py new file mode 100644 index 000000000000..a55aee7a66de --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/base.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_datasources_v1beta import gapic_version as package_version +from google.shopping.merchant_datasources_v1beta.types import fileuploads + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class FileUploadsServiceTransport(abc.ABC): + """Abstract transport class for FileUploadsService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_file_upload: gapic_v1.method.wrap_method( + self.get_file_upload, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def get_file_upload( + self, + ) -> Callable[ + [fileuploads.GetFileUploadRequest], + Union[fileuploads.FileUpload, Awaitable[fileuploads.FileUpload]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("FileUploadsServiceTransport",) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/grpc.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/grpc.py new file mode 100644 index 000000000000..7bbea5efafca --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/grpc.py @@ -0,0 +1,272 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_datasources_v1beta.types import fileuploads + +from .base import DEFAULT_CLIENT_INFO, FileUploadsServiceTransport + + +class FileUploadsServiceGrpcTransport(FileUploadsServiceTransport): + """gRPC backend transport for FileUploadsService. + + Service to manage data source file uploads. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. 
It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_file_upload( + self, + ) -> Callable[[fileuploads.GetFileUploadRequest], fileuploads.FileUpload]: + r"""Return a callable for the get file upload method over gRPC. + + Gets the latest data source file upload. Only the ``latest`` + alias is accepted for a file upload. + + Returns: + Callable[[~.GetFileUploadRequest], + ~.FileUpload]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_file_upload" not in self._stubs: + self._stubs["get_file_upload"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.datasources.v1beta.FileUploadsService/GetFileUpload", + request_serializer=fileuploads.GetFileUploadRequest.serialize, + response_deserializer=fileuploads.FileUpload.deserialize, + ) + return self._stubs["get_file_upload"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("FileUploadsServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..fc8a254fac63 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/grpc_asyncio.py @@ -0,0 +1,284 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_datasources_v1beta.types import fileuploads + +from .base import DEFAULT_CLIENT_INFO, FileUploadsServiceTransport +from .grpc import FileUploadsServiceGrpcTransport + + +class FileUploadsServiceGrpcAsyncIOTransport(FileUploadsServiceTransport): + """gRPC AsyncIO backend transport for FileUploadsService. + + Service to manage data source file uploads. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def get_file_upload( + self, + ) -> Callable[ + [fileuploads.GetFileUploadRequest], Awaitable[fileuploads.FileUpload] + ]: + r"""Return a callable for the get file upload method over gRPC. + + Gets the latest data source file upload. Only the ``latest`` + alias is accepted for a file upload. + + Returns: + Callable[[~.GetFileUploadRequest], + Awaitable[~.FileUpload]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_file_upload" not in self._stubs: + self._stubs["get_file_upload"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.datasources.v1beta.FileUploadsService/GetFileUpload", + request_serializer=fileuploads.GetFileUploadRequest.serialize, + response_deserializer=fileuploads.FileUpload.deserialize, + ) + return self._stubs["get_file_upload"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_file_upload: gapic_v1.method_async.wrap_method( + self.get_file_upload, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("FileUploadsServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/rest.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/rest.py new file mode 100644 index 000000000000..1647f7ff13a9 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/rest.py @@ -0,0 
+1,309 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.shopping.merchant_datasources_v1beta.types import fileuploads + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import FileUploadsServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class FileUploadsServiceRestInterceptor: + """Interceptor for FileUploadsService. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the FileUploadsServiceRestTransport. + + .. code-block:: python + class MyCustomFileUploadsServiceInterceptor(FileUploadsServiceRestInterceptor): + def pre_get_file_upload(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_file_upload(self, response): + logging.log(f"Received response: {response}") + return response + + transport = FileUploadsServiceRestTransport(interceptor=MyCustomFileUploadsServiceInterceptor()) + client = FileUploadsServiceClient(transport=transport) + + + """ + + def pre_get_file_upload( + self, + request: fileuploads.GetFileUploadRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[fileuploads.GetFileUploadRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_file_upload + + Override in a subclass to manipulate the request or metadata + before they are sent to the FileUploadsService server. + """ + return request, metadata + + def post_get_file_upload( + self, response: fileuploads.FileUpload + ) -> fileuploads.FileUpload: + """Post-rpc interceptor for get_file_upload + + Override in a subclass to manipulate the response + after it is returned by the FileUploadsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class FileUploadsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: FileUploadsServiceRestInterceptor + + +class FileUploadsServiceRestTransport(FileUploadsServiceTransport): + """REST backend transport for FileUploadsService. + + Service to manage data source file uploads. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[FileUploadsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or FileUploadsServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetFileUpload(FileUploadsServiceRestStub): + def __hash__(self): + return hash("GetFileUpload") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: fileuploads.GetFileUploadRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> fileuploads.FileUpload: + r"""Call the get file upload method over HTTP. 
+ + Args: + request (~.fileuploads.GetFileUploadRequest): + The request object. Request message for the + GetFileUploadRequest method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.fileuploads.FileUpload: + The file upload of a specific data + source, that is, the result of the + retrieval of the data source at a + certain timestamp computed + asynchronously when the data source + processing is finished. Only applicable + to file data sources. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/datasources/v1beta/{name=accounts/*/dataSources/*/fileUploads/*}", + }, + ] + request, metadata = self._interceptor.pre_get_file_upload(request, metadata) + pb_request = fileuploads.GetFileUploadRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = fileuploads.FileUpload() + pb_resp = fileuploads.FileUpload.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_file_upload(resp) + return resp + + @property + def get_file_upload( + self, + ) -> Callable[[fileuploads.GetFileUploadRequest], fileuploads.FileUpload]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetFileUpload(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("FileUploadsServiceRestTransport",) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/__init__.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/__init__.py index 495c5a32635f..22df9907872e 100644 --- a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/__init__.py +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/__init__.py @@ -24,6 +24,7 @@ UpdateDataSourceRequest, ) from .datasourcetypes import ( + DataSourceReference, LocalInventoryDataSource, PrimaryProductDataSource, PromotionDataSource, @@ -31,6 +32,7 @@ SupplementalProductDataSource, ) from .fileinputs import FileInput +from .fileuploads import FileUpload, GetFileUploadRequest __all__ = ( "CreateDataSourceRequest", @@ -41,10 +43,13 @@ "ListDataSourcesRequest", "ListDataSourcesResponse", "UpdateDataSourceRequest", + "DataSourceReference", "LocalInventoryDataSource", "PrimaryProductDataSource", "PromotionDataSource", "RegionalInventoryDataSource", "SupplementalProductDataSource", "FileInput", + "FileUpload", + 
"GetFileUploadRequest", ) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/datasourcetypes.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/datasourcetypes.py index 61ec51caa2ca..ca1671d2461a 100644 --- a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/datasourcetypes.py +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/datasourcetypes.py @@ -27,6 +27,7 @@ "LocalInventoryDataSource", "RegionalInventoryDataSource", "PromotionDataSource", + "DataSourceReference", }, ) @@ -76,6 +77,10 @@ class PrimaryProductDataSource(proto.Message): Optional. The countries where the items may be displayed. Represented as a `CLDR territory code `__. + default_rule (google.shopping.merchant_datasources_v1beta.types.PrimaryProductDataSource.DefaultRule): + Optional. Default rule management of the data + source. If set, the linked data sources will be + replaced. """ class Channel(proto.Enum): @@ -93,13 +98,53 @@ class Channel(proto.Enum): Local product. PRODUCTS (3): Unified data source for both local and online - products. + products. Note: Products management through the + API is not possible for this channel. """ CHANNEL_UNSPECIFIED = 0 ONLINE_PRODUCTS = 1 LOCAL_PRODUCTS = 2 PRODUCTS = 3 + class DefaultRule(proto.Message): + r"""Default rule management of the data source. + + Attributes: + take_from_data_sources (MutableSequence[google.shopping.merchant_datasources_v1beta.types.DataSourceReference]): + Required. The list of data sources linked in the `default + rule `__. + This list is ordered by the default rule priority of joining + the data. It might include none or multiple references to + ``self`` and supplemental data sources. + + The list must not be empty. 
+ + To link the data source to the default rule, you need to add + a new reference to this list (in sequential order). + + To unlink the data source from the default rule, you need to + remove the given reference from this list. To create + attribute rules that are different from the default rule, + see `Set up your attribute + rules `__. + + Changing the order of this list will result in changing the + priority of data sources in the default rule. + + For example, providing the following list: [``1001``, + ``self``] will take attribute values from supplemental data + source ``1001``, and fallback to ``self`` if the attribute + is not set in ``1001``. + """ + + take_from_data_sources: MutableSequence[ + "DataSourceReference" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DataSourceReference", + ) + channel: Channel = proto.Field( proto.ENUM, number=3, @@ -119,10 +164,22 @@ class Channel(proto.Enum): proto.STRING, number=6, ) + default_rule: DefaultRule = proto.Field( + proto.MESSAGE, + number=7, + message=DefaultRule, + ) class SupplementalProductDataSource(proto.Message): r"""The supplemental data source for local and online products. + Supplemental API data sources must not have ``feedLabel`` and + ``contentLanguage`` fields set. You can only use supplemental data + sources to update existing products. For information about creating + a supplemental data source, see `Create a supplemental data source + and link it to the primary data + source `__. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -159,6 +216,12 @@ class SupplementalProductDataSource(proto.Message): produts without that restriction. This field is a member of `oneof`_ ``_content_language``. + referencing_primary_data_sources (MutableSequence[google.shopping.merchant_datasources_v1beta.types.DataSourceReference]): + Output only. 
The (unordered and deduplicated) + list of all primary data sources linked to this + data source in either default or custom rules. + Supplemental data source cannot be deleted + before all links are removed. """ feed_label: str = proto.Field( @@ -171,6 +234,13 @@ class SupplementalProductDataSource(proto.Message): number=5, optional=True, ) + referencing_primary_data_sources: MutableSequence[ + "DataSourceReference" + ] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message="DataSourceReference", + ) class LocalInventoryDataSource(proto.Message): @@ -257,4 +327,50 @@ class PromotionDataSource(proto.Message): ) +class DataSourceReference(proto.Message): + r"""Data source reference can be used to manage related data + sources within the data source service. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + self_ (bool): + Self should be used to reference the primary + data source itself. + + This field is a member of `oneof`_ ``data_source_id``. + primary_data_source_name (str): + Optional. The name of the primary data source. Format: + ``accounts/{account}/dataSources/{datasource}`` + + This field is a member of `oneof`_ ``data_source_id``. + supplemental_data_source_name (str): + Optional. The name of the supplemental data source. Format: + ``accounts/{account}/dataSources/{datasource}`` + + This field is a member of `oneof`_ ``data_source_id``. 
+ """ + + self_: bool = proto.Field( + proto.BOOL, + number=1, + oneof="data_source_id", + ) + primary_data_source_name: str = proto.Field( + proto.STRING, + number=3, + oneof="data_source_id", + ) + supplemental_data_source_name: str = proto.Field( + proto.STRING, + number=2, + oneof="data_source_id", + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/fileuploads.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/fileuploads.py new file mode 100644 index 000000000000..c2369af53cb5 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/fileuploads.py @@ -0,0 +1,205 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.datasources.v1beta", + manifest={ + "FileUpload", + "GetFileUploadRequest", + }, +) + + +class FileUpload(proto.Message): + r"""The file upload of a specific data source, that is, the + result of the retrieval of the data source at a certain + timestamp computed asynchronously when the data source + processing is finished. 
Only applicable to file data sources. + + Attributes: + name (str): + Identifier. The name of the data source file upload. Format: + ``{datasource.name=accounts/{account}/dataSources/{datasource}/fileUploads/{fileupload}}`` + data_source_id (int): + Output only. The data source id. + processing_state (google.shopping.merchant_datasources_v1beta.types.FileUpload.ProcessingState): + Output only. The processing state of the data + source. + issues (MutableSequence[google.shopping.merchant_datasources_v1beta.types.FileUpload.Issue]): + Output only. The list of issues occurring in + the data source. + items_total (int): + Output only. The number of items in the data + source that were processed. + items_created (int): + Output only. The number of items in the data + source that were created. + items_updated (int): + Output only. The number of items in the data + source that were updated. + upload_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date at which the file of + the data source was uploaded. + """ + + class ProcessingState(proto.Enum): + r"""The processing state of the data source. + + Values: + PROCESSING_STATE_UNSPECIFIED (0): + Processing state unspecified. + FAILED (1): + The data source could not be processed or all + the items had errors. + IN_PROGRESS (2): + The data source is being processed. + SUCCEEDED (3): + The data source was processed successfully, + though some items might have had errors. + """ + PROCESSING_STATE_UNSPECIFIED = 0 + FAILED = 1 + IN_PROGRESS = 2 + SUCCEEDED = 3 + + class Issue(proto.Message): + r"""An error occurring in the data source, like "invalid price". + + Attributes: + title (str): + Output only. The title of the issue, for + example, "Item too big". + description (str): + Output only. The error description, for + example, "Your data source contains items which + have too many attributes, or are too big. These + items will be dropped". + code (str): + Output only. 
The code of the error, for example, + "validation/invalid_value". Returns "?" if the code is + unknown. + count (int): + Output only. The number of occurrences of the + error in the file upload. + severity (google.shopping.merchant_datasources_v1beta.types.FileUpload.Issue.Severity): + Output only. The severity of the issue. + documentation_uri (str): + Output only. Link to the documentation + explaining the issue in more details, if + available. + """ + + class Severity(proto.Enum): + r"""The severity of the issue. + + Values: + SEVERITY_UNSPECIFIED (0): + Severity unspecified. + WARNING (1): + The issue is the warning. + ERROR (2): + The issue is an error. + """ + SEVERITY_UNSPECIFIED = 0 + WARNING = 1 + ERROR = 2 + + title: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + code: str = proto.Field( + proto.STRING, + number=3, + ) + count: int = proto.Field( + proto.INT64, + number=4, + ) + severity: "FileUpload.Issue.Severity" = proto.Field( + proto.ENUM, + number=5, + enum="FileUpload.Issue.Severity", + ) + documentation_uri: str = proto.Field( + proto.STRING, + number=6, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + data_source_id: int = proto.Field( + proto.INT64, + number=2, + ) + processing_state: ProcessingState = proto.Field( + proto.ENUM, + number=3, + enum=ProcessingState, + ) + issues: MutableSequence[Issue] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=Issue, + ) + items_total: int = proto.Field( + proto.INT64, + number=5, + ) + items_created: int = proto.Field( + proto.INT64, + number=6, + ) + items_updated: int = proto.Field( + proto.INT64, + number=7, + ) + upload_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + + +class GetFileUploadRequest(proto.Message): + r"""Request message for the GetFileUploadRequest method. + + Attributes: + name (str): + Required. 
The name of the data source file upload to + retrieve. Format: + ``accounts/{account}/dataSources/{datasource}/fileUploads/latest`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_async.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_async.py new file mode 100644 index 000000000000..69eed065c6f5 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetFileUpload +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-datasources + + +# [START merchantapi_v1beta_generated_FileUploadsService_GetFileUpload_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_datasources_v1beta + + +async def sample_get_file_upload(): + # Create a client + client = merchant_datasources_v1beta.FileUploadsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.GetFileUploadRequest( + name="name_value", + ) + + # Make the request + response = await client.get_file_upload(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_FileUploadsService_GetFileUpload_async] diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_sync.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_sync.py new file mode 100644 index 000000000000..8ca612c4e3bd --- /dev/null +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetFileUpload +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-datasources + + +# [START merchantapi_v1beta_generated_FileUploadsService_GetFileUpload_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_datasources_v1beta + + +def sample_get_file_upload(): + # Create a client + client = merchant_datasources_v1beta.FileUploadsServiceClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.GetFileUploadRequest( + name="name_value", + ) + + # Make the request + response = client.get_file_upload(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_FileUploadsService_GetFileUpload_sync] diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/snippet_metadata_google.shopping.merchant.datasources.v1beta.json b/packages/google-shopping-merchant-datasources/samples/generated_samples/snippet_metadata_google.shopping.merchant.datasources.v1beta.json index ee381d03839d..8af0e5a52d60 100644 --- a/packages/google-shopping-merchant-datasources/samples/generated_samples/snippet_metadata_google.shopping.merchant.datasources.v1beta.json +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/snippet_metadata_google.shopping.merchant.datasources.v1beta.json @@ -972,6 +972,167 @@ } ], "title": 
"merchantapi_v1beta_generated_data_sources_service_update_data_source_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_datasources_v1beta.FileUploadsServiceAsyncClient", + "shortName": "FileUploadsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_datasources_v1beta.FileUploadsServiceAsyncClient.get_file_upload", + "method": { + "fullName": "google.shopping.merchant.datasources.v1beta.FileUploadsService.GetFileUpload", + "service": { + "fullName": "google.shopping.merchant.datasources.v1beta.FileUploadsService", + "shortName": "FileUploadsService" + }, + "shortName": "GetFileUpload" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_datasources_v1beta.types.GetFileUploadRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_datasources_v1beta.types.FileUpload", + "shortName": "get_file_upload" + }, + "description": "Sample for GetFileUpload", + "file": "merchantapi_v1beta_generated_file_uploads_service_get_file_upload_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_FileUploadsService_GetFileUpload_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_file_uploads_service_get_file_upload_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": 
{ + "fullName": "google.shopping.merchant_datasources_v1beta.FileUploadsServiceClient", + "shortName": "FileUploadsServiceClient" + }, + "fullName": "google.shopping.merchant_datasources_v1beta.FileUploadsServiceClient.get_file_upload", + "method": { + "fullName": "google.shopping.merchant.datasources.v1beta.FileUploadsService.GetFileUpload", + "service": { + "fullName": "google.shopping.merchant.datasources.v1beta.FileUploadsService", + "shortName": "FileUploadsService" + }, + "shortName": "GetFileUpload" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_datasources_v1beta.types.GetFileUploadRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_datasources_v1beta.types.FileUpload", + "shortName": "get_file_upload" + }, + "description": "Sample for GetFileUpload", + "file": "merchantapi_v1beta_generated_file_uploads_service_get_file_upload_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_FileUploadsService_GetFileUpload_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_file_uploads_service_get_file_upload_sync.py" } ] } diff --git a/packages/google-shopping-merchant-datasources/scripts/fixup_merchant_datasources_v1beta_keywords.py b/packages/google-shopping-merchant-datasources/scripts/fixup_merchant_datasources_v1beta_keywords.py index 
77f6b0db701f..74286e5cd17b 100644 --- a/packages/google-shopping-merchant-datasources/scripts/fixup_merchant_datasources_v1beta_keywords.py +++ b/packages/google-shopping-merchant-datasources/scripts/fixup_merchant_datasources_v1beta_keywords.py @@ -43,6 +43,7 @@ class merchant_datasourcesCallTransformer(cst.CSTTransformer): 'delete_data_source': ('name', ), 'fetch_data_source': ('name', ), 'get_data_source': ('name', ), + 'get_file_upload': ('name', ), 'list_data_sources': ('parent', 'page_size', 'page_token', ), 'update_data_source': ('data_source', 'update_mask', ), } diff --git a/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_data_sources_service.py b/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_data_sources_service.py index 9bd09642ee77..5ef814bac36e 100644 --- a/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_data_sources_service.py +++ b/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_data_sources_service.py @@ -4366,10 +4366,20 @@ def test_create_data_source_rest(request_type): "feed_label": "feed_label_value", "content_language": "content_language_value", "countries": ["countries_value1", "countries_value2"], + "default_rule": { + "take_from_data_sources": [ + { + "self_": True, + "primary_data_source_name": "primary_data_source_name_value", + "supplemental_data_source_name": "supplemental_data_source_name_value", + } + ] + }, }, "supplemental_product_data_source": { "feed_label": "feed_label_value", "content_language": "content_language_value", + "referencing_primary_data_sources": {}, }, "local_inventory_data_source": { "feed_label": "feed_label_value", @@ -4813,10 +4823,20 @@ def test_update_data_source_rest(request_type): "feed_label": "feed_label_value", "content_language": "content_language_value", "countries": ["countries_value1", "countries_value2"], + 
"default_rule": { + "take_from_data_sources": [ + { + "self_": True, + "primary_data_source_name": "primary_data_source_name_value", + "supplemental_data_source_name": "supplemental_data_source_name_value", + } + ] + }, }, "supplemental_product_data_source": { "feed_label": "feed_label_value", "content_language": "content_language_value", + "referencing_primary_data_sources": {}, }, "local_inventory_data_source": { "feed_label": "feed_label_value", diff --git a/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_file_uploads_service.py b/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_file_uploads_service.py new file mode 100644 index 000000000000..7c1b989a8758 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_file_uploads_service.py @@ -0,0 +1,2632 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_datasources_v1beta.services.file_uploads_service import ( + FileUploadsServiceAsyncClient, + FileUploadsServiceClient, + transports, +) +from google.shopping.merchant_datasources_v1beta.types import fileuploads + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert FileUploadsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + FileUploadsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + FileUploadsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + FileUploadsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + FileUploadsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + FileUploadsServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert FileUploadsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert FileUploadsServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert FileUploadsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + FileUploadsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == 
"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert FileUploadsServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert FileUploadsServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert FileUploadsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + FileUploadsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert FileUploadsServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert FileUploadsServiceClient._get_client_cert_source(None, False) is None + assert ( + FileUploadsServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + FileUploadsServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + FileUploadsServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + 
FileUploadsServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + FileUploadsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileUploadsServiceClient), +) +@mock.patch.object( + FileUploadsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileUploadsServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = FileUploadsServiceClient._DEFAULT_UNIVERSE + default_endpoint = FileUploadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = FileUploadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + FileUploadsServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + FileUploadsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == FileUploadsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + FileUploadsServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + FileUploadsServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == FileUploadsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + FileUploadsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == FileUploadsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + FileUploadsServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + FileUploadsServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + FileUploadsServiceClient._get_api_endpoint( + None, 
mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + FileUploadsServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + FileUploadsServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + FileUploadsServiceClient._get_universe_domain(None, None) + == FileUploadsServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + FileUploadsServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (FileUploadsServiceClient, transports.FileUploadsServiceGrpcTransport, "grpc"), + (FileUploadsServiceClient, transports.FileUploadsServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. 
Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (FileUploadsServiceClient, "grpc"), + (FileUploadsServiceAsyncClient, "grpc_asyncio"), + (FileUploadsServiceClient, "rest"), + ], +) +def test_file_uploads_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.FileUploadsServiceGrpcTransport, "grpc"), + (transports.FileUploadsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.FileUploadsServiceRestTransport, "rest"), + ], +) +def 
test_file_uploads_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (FileUploadsServiceClient, "grpc"), + (FileUploadsServiceAsyncClient, "grpc_asyncio"), + (FileUploadsServiceClient, "rest"), + ], +) +def test_file_uploads_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +def test_file_uploads_service_client_get_transport_class(): + transport = FileUploadsServiceClient.get_transport_class() + available_transports = [ + transports.FileUploadsServiceGrpcTransport, + transports.FileUploadsServiceRestTransport, + ] + assert transport in 
available_transports + + transport = FileUploadsServiceClient.get_transport_class("grpc") + assert transport == transports.FileUploadsServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (FileUploadsServiceClient, transports.FileUploadsServiceGrpcTransport, "grpc"), + ( + FileUploadsServiceAsyncClient, + transports.FileUploadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (FileUploadsServiceClient, transports.FileUploadsServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + FileUploadsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileUploadsServiceClient), +) +@mock.patch.object( + FileUploadsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileUploadsServiceAsyncClient), +) +def test_file_uploads_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(FileUploadsServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(FileUploadsServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + 
always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + FileUploadsServiceClient, + transports.FileUploadsServiceGrpcTransport, + "grpc", + "true", + ), + ( + FileUploadsServiceAsyncClient, + transports.FileUploadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + FileUploadsServiceClient, + transports.FileUploadsServiceGrpcTransport, + "grpc", + "false", + ), + ( + FileUploadsServiceAsyncClient, + transports.FileUploadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + FileUploadsServiceClient, + transports.FileUploadsServiceRestTransport, + "rest", + "true", + ), + ( + FileUploadsServiceClient, + transports.FileUploadsServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + FileUploadsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileUploadsServiceClient), +) +@mock.patch.object( + FileUploadsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileUploadsServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_file_uploads_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [FileUploadsServiceClient, FileUploadsServiceAsyncClient] +) +@mock.patch.object( + FileUploadsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(FileUploadsServiceClient), +) +@mock.patch.object( + FileUploadsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(FileUploadsServiceAsyncClient), +) +def test_file_uploads_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [FileUploadsServiceClient, FileUploadsServiceAsyncClient] +) +@mock.patch.object( + FileUploadsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileUploadsServiceClient), +) +@mock.patch.object( + FileUploadsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileUploadsServiceAsyncClient), +) +def test_file_uploads_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = FileUploadsServiceClient._DEFAULT_UNIVERSE + default_endpoint = FileUploadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = FileUploadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (FileUploadsServiceClient, transports.FileUploadsServiceGrpcTransport, "grpc"), + ( + FileUploadsServiceAsyncClient, + transports.FileUploadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (FileUploadsServiceClient, transports.FileUploadsServiceRestTransport, "rest"), + ], +) +def test_file_uploads_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + FileUploadsServiceClient, + transports.FileUploadsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + FileUploadsServiceAsyncClient, + transports.FileUploadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + FileUploadsServiceClient, + transports.FileUploadsServiceRestTransport, + "rest", + None, + ), + ], +) +def 
test_file_uploads_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_file_uploads_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_datasources_v1beta.services.file_uploads_service.transports.FileUploadsServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = FileUploadsServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + FileUploadsServiceClient, + transports.FileUploadsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + FileUploadsServiceAsyncClient, + transports.FileUploadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_file_uploads_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is 
provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + fileuploads.GetFileUploadRequest, + dict, + ], +) +def test_get_file_upload(request_type, transport: str = "grpc"): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are 
mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file_upload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = fileuploads.FileUpload( + name="name_value", + data_source_id=1462, + processing_state=fileuploads.FileUpload.ProcessingState.FAILED, + items_total=1189, + items_created=1369, + items_updated=1384, + ) + response = client.get_file_upload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = fileuploads.GetFileUploadRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, fileuploads.FileUpload) + assert response.name == "name_value" + assert response.data_source_id == 1462 + assert response.processing_state == fileuploads.FileUpload.ProcessingState.FAILED + assert response.items_total == 1189 + assert response.items_created == 1369 + assert response.items_updated == 1384 + + +def test_get_file_upload_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file_upload), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_file_upload() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == fileuploads.GetFileUploadRequest() + + +def test_get_file_upload_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = fileuploads.GetFileUploadRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file_upload), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_file_upload(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == fileuploads.GetFileUploadRequest( + name="name_value", + ) + + +def test_get_file_upload_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_file_upload in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_file_upload] = mock_rpc + request = {} + client.get_file_upload(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_file_upload(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_file_upload_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FileUploadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file_upload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + fileuploads.FileUpload( + name="name_value", + data_source_id=1462, + processing_state=fileuploads.FileUpload.ProcessingState.FAILED, + items_total=1189, + items_created=1369, + items_updated=1384, + ) + ) + response = await client.get_file_upload() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == fileuploads.GetFileUploadRequest() + + +@pytest.mark.asyncio +async def test_get_file_upload_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = FileUploadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert 
( + client._client._transport.get_file_upload + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_file_upload + ] = mock_rpc + + request = {} + await client.get_file_upload(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_file_upload(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_file_upload_async( + transport: str = "grpc_asyncio", request_type=fileuploads.GetFileUploadRequest +): + client = FileUploadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file_upload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + fileuploads.FileUpload( + name="name_value", + data_source_id=1462, + processing_state=fileuploads.FileUpload.ProcessingState.FAILED, + items_total=1189, + items_created=1369, + items_updated=1384, + ) + ) + response = await client.get_file_upload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = fileuploads.GetFileUploadRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, fileuploads.FileUpload) + assert response.name == "name_value" + assert response.data_source_id == 1462 + assert response.processing_state == fileuploads.FileUpload.ProcessingState.FAILED + assert response.items_total == 1189 + assert response.items_created == 1369 + assert response.items_updated == 1384 + + +@pytest.mark.asyncio +async def test_get_file_upload_async_from_dict(): + await test_get_file_upload_async(request_type=dict) + + +def test_get_file_upload_field_headers(): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = fileuploads.GetFileUploadRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file_upload), "__call__") as call: + call.return_value = fileuploads.FileUpload() + client.get_file_upload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_file_upload_field_headers_async(): + client = FileUploadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = fileuploads.GetFileUploadRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_file_upload), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + fileuploads.FileUpload() + ) + await client.get_file_upload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_file_upload_flattened(): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file_upload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = fileuploads.FileUpload() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_file_upload( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_file_upload_flattened_error(): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_file_upload( + fileuploads.GetFileUploadRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_file_upload_flattened_async(): + client = FileUploadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_file_upload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = fileuploads.FileUpload() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + fileuploads.FileUpload() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_file_upload( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_file_upload_flattened_error_async(): + client = FileUploadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_file_upload( + fileuploads.GetFileUploadRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + fileuploads.GetFileUploadRequest, + dict, + ], +) +def test_get_file_upload_rest(request_type): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/dataSources/sample2/fileUploads/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = fileuploads.FileUpload( + name="name_value", + data_source_id=1462, + processing_state=fileuploads.FileUpload.ProcessingState.FAILED, + items_total=1189, + items_created=1369, + items_updated=1384, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = fileuploads.FileUpload.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_file_upload(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, fileuploads.FileUpload) + assert response.name == "name_value" + assert response.data_source_id == 1462 + assert response.processing_state == fileuploads.FileUpload.ProcessingState.FAILED + assert response.items_total == 1189 + assert response.items_created == 1369 + assert response.items_updated == 1384 + + +def test_get_file_upload_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_file_upload in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_file_upload] = mock_rpc + + request = {} + client.get_file_upload(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_file_upload(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_file_upload_rest_required_fields( + request_type=fileuploads.GetFileUploadRequest, +): + transport_class = transports.FileUploadsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_file_upload._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_file_upload._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = fileuploads.FileUpload() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = fileuploads.FileUpload.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_file_upload(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_file_upload_rest_unset_required_fields(): + transport = transports.FileUploadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_file_upload._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_file_upload_rest_interceptors(null_interceptor): + transport = transports.FileUploadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FileUploadsServiceRestInterceptor(), + ) + client = FileUploadsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.FileUploadsServiceRestInterceptor, "post_get_file_upload" + ) as post, mock.patch.object( + transports.FileUploadsServiceRestInterceptor, "pre_get_file_upload" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = fileuploads.GetFileUploadRequest.pb( + fileuploads.GetFileUploadRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = fileuploads.FileUpload.to_json( + fileuploads.FileUpload() + ) + + request = fileuploads.GetFileUploadRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = fileuploads.FileUpload() + + client.get_file_upload( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_file_upload_rest_bad_request( + transport: str = "rest", request_type=fileuploads.GetFileUploadRequest +): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/dataSources/sample2/fileUploads/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_file_upload(request) + + +def test_get_file_upload_rest_flattened(): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = fileuploads.FileUpload() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "accounts/sample1/dataSources/sample2/fileUploads/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = fileuploads.FileUpload.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_file_upload(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/datasources/v1beta/{name=accounts/*/dataSources/*/fileUploads/*}" + % client.transport._host, + args[1], + ) + + +def test_get_file_upload_rest_flattened_error(transport: str = "rest"): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_file_upload( + fileuploads.GetFileUploadRequest(), + name="name_value", + ) + + +def test_get_file_upload_rest_error(): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.FileUploadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.FileUploadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FileUploadsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.FileUploadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FileUploadsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FileUploadsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.FileUploadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FileUploadsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.FileUploadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = FileUploadsServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.FileUploadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.FileUploadsServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FileUploadsServiceGrpcTransport, + transports.FileUploadsServiceGrpcAsyncIOTransport, + transports.FileUploadsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = FileUploadsServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.FileUploadsServiceGrpcTransport, + ) + + +def test_file_uploads_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.FileUploadsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_file_uploads_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.shopping.merchant_datasources_v1beta.services.file_uploads_service.transports.FileUploadsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.FileUploadsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ("get_file_upload",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_file_uploads_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_datasources_v1beta.services.file_uploads_service.transports.FileUploadsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FileUploadsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_file_uploads_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_datasources_v1beta.services.file_uploads_service.transports.FileUploadsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FileUploadsServiceTransport() + adc.assert_called_once() + + +def test_file_uploads_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + FileUploadsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FileUploadsServiceGrpcTransport, + transports.FileUploadsServiceGrpcAsyncIOTransport, + ], +) +def test_file_uploads_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FileUploadsServiceGrpcTransport, + transports.FileUploadsServiceGrpcAsyncIOTransport, + transports.FileUploadsServiceRestTransport, + ], +) +def test_file_uploads_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.FileUploadsServiceGrpcTransport, grpc_helpers), + (transports.FileUploadsServiceGrpcAsyncIOTransport, 
grpc_helpers_async), + ], +) +def test_file_uploads_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FileUploadsServiceGrpcTransport, + transports.FileUploadsServiceGrpcAsyncIOTransport, + ], +) +def test_file_uploads_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_file_uploads_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.FileUploadsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_file_uploads_service_host_no_port(transport_name): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_file_uploads_service_host_with_port(transport_name): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000" + 
) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_file_uploads_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = FileUploadsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = FileUploadsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_file_upload._session + session2 = client2.transport.get_file_upload._session + assert session1 != session2 + + +def test_file_uploads_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.FileUploadsServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_file_uploads_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.FileUploadsServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.FileUploadsServiceGrpcTransport, + transports.FileUploadsServiceGrpcAsyncIOTransport, + ], +) +def test_file_uploads_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.FileUploadsServiceGrpcTransport, + transports.FileUploadsServiceGrpcAsyncIOTransport, + ], +) +def test_file_uploads_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_file_upload_path(): + account = "squid" + datasource = "clam" + fileupload = "whelk" + expected = ( + "accounts/{account}/dataSources/{datasource}/fileUploads/{fileupload}".format( + account=account, + datasource=datasource, + fileupload=fileupload, + ) + ) + actual = FileUploadsServiceClient.file_upload_path(account, datasource, fileupload) + assert expected == actual + + +def test_parse_file_upload_path(): + expected = { + "account": "octopus", + "datasource": "oyster", + "fileupload": "nudibranch", + } + path = FileUploadsServiceClient.file_upload_path(**expected) + + # Check that the path construction is reversible. 
+ actual = FileUploadsServiceClient.parse_file_upload_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = FileUploadsServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = FileUploadsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = FileUploadsServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = FileUploadsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = FileUploadsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = FileUploadsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = FileUploadsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = FileUploadsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = FileUploadsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = FileUploadsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = FileUploadsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = FileUploadsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = FileUploadsServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = FileUploadsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = FileUploadsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.FileUploadsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.FileUploadsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = FileUploadsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = FileUploadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (FileUploadsServiceClient, transports.FileUploadsServiceGrpcTransport), + ( + FileUploadsServiceAsyncClient, + transports.FileUploadsServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From 6db79dc964b540f1c9c21d96122e4916aca66d98 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 15:53:34 +0000 Subject: [PATCH 57/59] feat: [google-ads-admanager] Added support for Interactive Reporting (#13123) - [ ] Regenerate this pull request now. BEGIN_COMMIT_OVERRIDE feat: Added support for Interactive Reporting fix!: Removed closed beta services that had data discrepancies with the SOAP API END_COMMIT_OVERRIDE Temporarily removed the LineItem, Creative, Contact, Label, and Team services until data discrepancies with the SOAP API are resolved. 
PiperOrigin-RevId: 681864022 Source-Link: https://github.com/googleapis/googleapis/commit/672cd6a381c7a0aea16438e2335dc7799bd70e4d Source-Link: https://github.com/googleapis/googleapis-gen/commit/039270990aba3bf810f2a274e8ffdd2fd6a954e1 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFkcy1hZG1hbmFnZXIvLk93bEJvdC55YW1sIiwiaCI6IjAzOTI3MDk5MGFiYTNiZjgxMGYyYTI3NGU4ZmZkZDJmZDZhOTU0ZTEifQ== --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../docs/admanager_v1/ad_partner_service.rst | 10 - .../docs/admanager_v1/contact_service.rst | 10 - .../docs/admanager_v1/creative_service.rst | 10 - .../entity_signals_mapping_service.rst | 10 + .../docs/admanager_v1/label_service.rst | 10 - .../docs/admanager_v1/line_item_service.rst | 10 - .../docs/admanager_v1/report_service.rst | 4 + .../docs/admanager_v1/services_.rst | 8 +- .../taxonomy_category_service.rst | 10 + .../docs/admanager_v1/team_service.rst | 10 - .../docs/admanager_v1/user_service.rst | 4 - .../google/ads/admanager/__init__.py | 265 +- .../google/ads/admanager_v1/__init__.py | 236 +- .../ads/admanager_v1/gapic_metadata.json | 154 +- .../ad_partner_service/transports/__init__.py | 30 - .../services/ad_unit_service/client.py | 122 +- .../services/ad_unit_service/pagers.py | 78 +- .../ad_unit_service/transports/base.py | 21 +- .../ad_unit_service/transports/rest.py | 153 +- .../services/company_service/client.py | 6 +- .../services/company_service/pagers.py | 4 +- .../company_service/transports/base.py | 4 +- .../company_service/transports/rest.py | 22 +- .../services/contact_service/__init__.py | 18 - .../services/contact_service/client.py | 986 ---- .../services/contact_service/pagers.py | 115 - .../contact_service/transports/__init__.py | 30 - .../contact_service/transports/base.py | 188 - .../contact_service/transports/rest.py | 526 -- .../creative_service/transports/rest.py | 527 -- .../services/custom_field_service/client.py | 12 +- .../services/custom_field_service/pagers.py | 4 +- 
.../custom_field_service/transports/base.py | 6 +- .../custom_field_service/transports/rest.py | 24 +- .../custom_targeting_key_service/client.py | 3 +- .../custom_targeting_key_service/pagers.py | 7 +- .../transports/base.py | 9 +- .../transports/rest.py | 23 +- .../custom_targeting_value_service/client.py | 3 +- .../custom_targeting_value_service/pagers.py | 9 +- .../transports/base.py | 9 +- .../transports/rest.py | 23 +- .../__init__.py | 4 +- .../client.py | 785 ++- .../pagers.py | 45 +- .../transports/__init__.py | 19 +- .../transports/base.py | 108 +- .../transports/rest.py | 1153 ++++ .../services/label_service/__init__.py | 18 - .../services/label_service/client.py | 977 ---- .../services/label_service/pagers.py | 115 - .../label_service/transports/__init__.py | 30 - .../services/label_service/transports/base.py | 188 - .../services/label_service/transports/rest.py | 520 -- .../services/line_item_service/__init__.py | 18 - .../services/line_item_service/client.py | 1021 ---- .../services/line_item_service/pagers.py | 115 - .../line_item_service/transports/__init__.py | 30 - .../line_item_service/transports/base.py | 188 - .../line_item_service/transports/rest.py | 528 -- .../services/network_service/client.py | 77 +- .../network_service/transports/base.py | 21 +- .../network_service/transports/rest.py | 135 +- .../services/order_service/client.py | 29 +- .../services/order_service/pagers.py | 4 +- .../services/order_service/transports/base.py | 4 +- .../services/order_service/transports/rest.py | 18 +- .../services/placement_service/client.py | 8 +- .../services/placement_service/pagers.py | 4 +- .../placement_service/transports/base.py | 4 +- .../placement_service/transports/rest.py | 22 +- .../services/report_service/client.py | 660 ++- .../services/report_service/pagers.py | 189 + .../report_service/transports/base.py | 84 +- .../report_service/transports/rest.py | 691 ++- .../services/role_service/client.py | 23 +- .../services/role_service/pagers.py | 
4 +- .../services/role_service/transports/base.py | 4 +- .../services/role_service/transports/rest.py | 30 +- .../__init__.py | 4 +- .../client.py | 210 +- .../pagers.py | 39 +- .../transports/__init__.py | 19 +- .../transports/base.py | 36 +- .../transports/rest.py | 184 +- .../services/team_service/__init__.py | 18 - .../services/team_service/client.py | 977 ---- .../services/team_service/pagers.py | 115 - .../services/team_service/transports/base.py | 187 - .../services/team_service/transports/rest.py | 516 -- .../services/user_service/client.py | 140 +- .../services/user_service/pagers.py | 115 - .../services/user_service/transports/base.py | 20 +- .../services/user_service/transports/rest.py | 144 +- .../google/ads/admanager_v1/types/__init__.py | 221 +- .../types/ad_partner_declaration.py | 80 - .../ads/admanager_v1/types/ad_unit_enums.py | 93 +- .../admanager_v1/types/ad_unit_messages.py | 369 ++ .../ads/admanager_v1/types/ad_unit_service.py | 463 +- .../ads/admanager_v1/types/ad_unit_size.py | 67 - .../admanager_v1/types/company_messages.py | 174 + .../ads/admanager_v1/types/company_service.py | 144 +- .../admanager_v1/types/company_type_enum.py | 4 - .../types/computed_status_enum.py | 90 - .../admanager_v1/types/contact_messages.py | 56 + .../ads/admanager_v1/types/contact_service.py | 174 - .../types/creative_placeholder.py | 99 - .../admanager_v1/types/creative_service.py | 229 - .../types/custom_field_messages.py | 138 + .../types/custom_field_service.py | 124 +- .../admanager_v1/types/custom_field_value.py | 114 + .../types/custom_targeting_key_messages.py | 93 + .../types/custom_targeting_key_service.py | 70 +- .../types/custom_targeting_value_messages.py | 81 + .../types/custom_targeting_value_service.py | 56 +- .../types/entity_signals_mapping_messages.py | 96 + .../types/entity_signals_mapping_service.py | 306 ++ .../ads/admanager_v1/types/frequency_cap.py | 46 +- .../google/ads/admanager_v1/types/goal.py | 204 - 
.../ads/admanager_v1/types/label_messages.py | 46 + .../ads/admanager_v1/types/label_service.py | 168 - .../ads/admanager_v1/types/line_item_enums.py | 314 -- .../admanager_v1/types/line_item_service.py | 491 -- .../admanager_v1/types/network_messages.py | 106 + .../ads/admanager_v1/types/network_service.py | 90 +- .../ads/admanager_v1/types/order_enums.py | 73 + .../ads/admanager_v1/types/order_messages.py | 278 + .../ads/admanager_v1/types/order_service.py | 273 +- .../admanager_v1/types/placement_messages.py | 102 + .../admanager_v1/types/placement_service.py | 77 +- .../ads/admanager_v1/types/report_service.py | 4663 ++++++++++++++++- .../ads/admanager_v1/types/role_enums.py | 52 + .../ads/admanager_v1/types/role_messages.py | 79 + .../ads/admanager_v1/types/role_service.py | 47 +- .../google/ads/admanager_v1/types/size.py | 55 +- .../ads/admanager_v1/types/size_type_enum.py | 78 + .../types/taxonomy_category_messages.py | 96 + ...ervice.py => taxonomy_category_service.py} | 78 +- .../admanager_v1/types/taxonomy_type_enum.py | 62 + .../ads/admanager_v1/types/team_messages.py | 54 + .../ads/admanager_v1/types/team_service.py | 168 - .../ads/admanager_v1/types/time_unit_enum.py | 69 + .../ads/admanager_v1/types/user_messages.py | 109 + .../ads/admanager_v1/types/user_service.py | 187 - ...d_unit_service_list_ad_unit_sizes_sync.py} | 14 +- ...tch_create_entity_signals_mappings_sync.py | 58 + ...tch_update_entity_signals_mappings_sync.py | 57 + ...vice_create_entity_signals_mapping_sync.py | 57 + ...ervice_get_entity_signals_mapping_sync.py} | 14 +- ...vice_list_entity_signals_mappings_sync.py} | 14 +- ...vice_update_entity_signals_mapping_sync.py | 56 + ...ed_line_item_service_get_line_item_sync.py | 52 - ...ted_network_service_list_networks_sync.py} | 15 +- ...ated_report_service_create_report_sync.py} | 23 +- ..._service_fetch_report_result_rows_sync.py} | 15 +- ...nerated_report_service_get_report_sync.py} | 14 +- ...rated_report_service_list_reports_sync.py} | 
14 +- ...nerated_report_service_run_report_sync.py} | 14 +- ...ated_report_service_update_report_sync.py} | 21 +- ...ory_service_get_taxonomy_category_sync.py} | 14 +- ..._service_list_taxonomy_categories_sync.py} | 14 +- ..._generated_team_service_list_teams_sync.py | 53 - ...ppet_metadata_google.ads.admanager.v1.json | 1260 +++-- .../doc-formatting.yaml | 1 + .../scripts/fixup_admanager_v1_keywords.py | 30 +- .../admanager_v1/test_ad_unit_service.py | 458 +- .../admanager_v1/test_company_service.py | 42 +- .../admanager_v1/test_contact_service.py | 2180 -------- .../admanager_v1/test_creative_service.py | 2249 -------- .../admanager_v1/test_custom_field_service.py | 44 +- .../test_custom_targeting_key_service.py | 39 +- .../test_custom_targeting_value_service.py | 39 +- .../test_entity_signals_mapping_service.py | 3898 ++++++++++++++ .../gapic/admanager_v1/test_label_service.py | 2151 -------- .../admanager_v1/test_line_item_service.py | 2295 -------- .../admanager_v1/test_network_service.py | 190 +- .../gapic/admanager_v1/test_order_service.py | 133 +- .../admanager_v1/test_placement_service.py | 44 +- .../gapic/admanager_v1/test_report_service.py | 2084 +++++++- .../gapic/admanager_v1/test_role_service.py | 48 +- ...e.py => test_taxonomy_category_service.py} | 679 +-- .../gapic/admanager_v1/test_team_service.py | 2145 -------- .../gapic/admanager_v1/test_user_service.py | 469 +- .../doc-formatting.yaml | 24 + 184 files changed, 20721 insertions(+), 26016 deletions(-) delete mode 100644 packages/google-ads-admanager/docs/admanager_v1/ad_partner_service.rst delete mode 100644 packages/google-ads-admanager/docs/admanager_v1/contact_service.rst delete mode 100644 packages/google-ads-admanager/docs/admanager_v1/creative_service.rst create mode 100644 packages/google-ads-admanager/docs/admanager_v1/entity_signals_mapping_service.rst delete mode 100644 packages/google-ads-admanager/docs/admanager_v1/label_service.rst delete mode 100644 
packages/google-ads-admanager/docs/admanager_v1/line_item_service.rst create mode 100644 packages/google-ads-admanager/docs/admanager_v1/taxonomy_category_service.rst delete mode 100644 packages/google-ads-admanager/docs/admanager_v1/team_service.rst delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/__init__.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/__init__.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/client.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/pagers.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/__init__.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/base.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/rest.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/rest.py rename packages/google-ads-admanager/google/ads/admanager_v1/services/{creative_service => entity_signals_mapping_service}/__init__.py (85%) rename packages/google-ads-admanager/google/ads/admanager_v1/services/{creative_service => entity_signals_mapping_service}/client.py (53%) rename packages/google-ads-admanager/google/ads/admanager_v1/services/{creative_service => entity_signals_mapping_service}/pagers.py (69%) rename packages/google-ads-admanager/google/ads/admanager_v1/services/{team_service => entity_signals_mapping_service}/transports/__init__.py (60%) rename packages/google-ads-admanager/google/ads/admanager_v1/services/{ad_partner_service => entity_signals_mapping_service}/transports/base.py (63%) create mode 100644 
packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/rest.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/__init__.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/client.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/pagers.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/__init__.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/base.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/rest.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/__init__.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/client.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/pagers.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/transports/__init__.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/transports/base.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/transports/rest.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/pagers.py rename packages/google-ads-admanager/google/ads/admanager_v1/services/{ad_partner_service => taxonomy_category_service}/__init__.py (86%) rename packages/google-ads-admanager/google/ads/admanager_v1/services/{ad_partner_service => taxonomy_category_service}/client.py (84%) rename packages/google-ads-admanager/google/ads/admanager_v1/services/{ad_partner_service => taxonomy_category_service}/pagers.py (71%) rename 
packages/google-ads-admanager/google/ads/admanager_v1/services/{creative_service => taxonomy_category_service}/transports/__init__.py (61%) rename packages/google-ads-admanager/google/ads/admanager_v1/services/{creative_service => taxonomy_category_service}/transports/base.py (86%) rename packages/google-ads-admanager/google/ads/admanager_v1/services/{ad_partner_service => taxonomy_category_service}/transports/rest.py (73%) delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/__init__.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/client.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/pagers.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/transports/base.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/transports/rest.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/pagers.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/ad_partner_declaration.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_messages.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_size.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/company_messages.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/computed_status_enum.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/contact_messages.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/contact_service.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/creative_placeholder.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/creative_service.py create mode 100644 
packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_messages.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_value.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_key_messages.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_value_messages.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/entity_signals_mapping_messages.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/entity_signals_mapping_service.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/goal.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/label_messages.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/label_service.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/line_item_enums.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/line_item_service.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/network_messages.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/order_enums.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/order_messages.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/placement_messages.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/role_enums.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/role_messages.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/size_type_enum.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_category_messages.py rename packages/google-ads-admanager/google/ads/admanager_v1/types/{ad_partner_service.py => 
taxonomy_category_service.py} (63%) create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_type_enum.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/team_messages.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/team_service.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/time_unit_enum.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/user_messages.py rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_line_item_service_list_line_items_sync.py => admanager_v1_generated_ad_unit_service_list_ad_unit_sizes_sync.py} (81%) create mode 100644 packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_batch_create_entity_signals_mappings_sync.py create mode 100644 packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_batch_update_entity_signals_mappings_sync.py create mode 100644 packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_create_entity_signals_mapping_sync.py rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_creative_service_get_creative_sync.py => admanager_v1_generated_entity_signals_mapping_service_get_entity_signals_mapping_sync.py} (77%) rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_label_service_list_labels_sync.py => admanager_v1_generated_entity_signals_mapping_service_list_entity_signals_mappings_sync.py} (77%) create mode 100644 packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_update_entity_signals_mapping_sync.py delete mode 100644 packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_line_item_service_get_line_item_sync.py rename 
packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_label_service_get_label_sync.py => admanager_v1_generated_network_service_list_networks_sync.py} (81%) rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_contact_service_list_contacts_sync.py => admanager_v1_generated_report_service_create_report_sync.py} (70%) rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_creative_service_list_creatives_sync.py => admanager_v1_generated_report_service_fetch_report_result_rows_sync.py} (79%) rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_team_service_get_team_sync.py => admanager_v1_generated_report_service_get_report_sync.py} (82%) rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_user_service_list_users_sync.py => admanager_v1_generated_report_service_list_reports_sync.py} (82%) rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_report_service_export_saved_report_sync.py => admanager_v1_generated_report_service_run_report_sync.py} (82%) rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_contact_service_get_contact_sync.py => admanager_v1_generated_report_service_update_report_sync.py} (71%) rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_ad_partner_service_get_ad_partner_sync.py => admanager_v1_generated_taxonomy_category_service_get_taxonomy_category_sync.py} (78%) rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_ad_partner_service_list_ad_partners_sync.py => admanager_v1_generated_taxonomy_category_service_list_taxonomy_categories_sync.py} (78%) delete mode 100644 packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_team_service_list_teams_sync.py create mode 120000 
packages/google-ads-admanager/scripts/client-post-processing/doc-formatting.yaml delete mode 100644 packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_contact_service.py delete mode 100644 packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_creative_service.py create mode 100644 packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_entity_signals_mapping_service.py delete mode 100644 packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_label_service.py delete mode 100644 packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_line_item_service.py rename packages/google-ads-admanager/tests/unit/gapic/admanager_v1/{test_ad_partner_service.py => test_taxonomy_category_service.py} (75%) delete mode 100644 packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_team_service.py diff --git a/packages/google-ads-admanager/docs/admanager_v1/ad_partner_service.rst b/packages/google-ads-admanager/docs/admanager_v1/ad_partner_service.rst deleted file mode 100644 index 7ccc095d3628..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/ad_partner_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -AdPartnerService ----------------------------------- - -.. automodule:: google.ads.admanager_v1.services.ad_partner_service - :members: - :inherited-members: - -.. automodule:: google.ads.admanager_v1.services.ad_partner_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/contact_service.rst b/packages/google-ads-admanager/docs/admanager_v1/contact_service.rst deleted file mode 100644 index 478ccc08a803..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/contact_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -ContactService --------------------------------- - -.. automodule:: google.ads.admanager_v1.services.contact_service - :members: - :inherited-members: - -.. 
automodule:: google.ads.admanager_v1.services.contact_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/creative_service.rst b/packages/google-ads-admanager/docs/admanager_v1/creative_service.rst deleted file mode 100644 index 2f4e457a7ab3..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/creative_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -CreativeService ---------------------------------- - -.. automodule:: google.ads.admanager_v1.services.creative_service - :members: - :inherited-members: - -.. automodule:: google.ads.admanager_v1.services.creative_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/entity_signals_mapping_service.rst b/packages/google-ads-admanager/docs/admanager_v1/entity_signals_mapping_service.rst new file mode 100644 index 000000000000..d4e1f7fa5634 --- /dev/null +++ b/packages/google-ads-admanager/docs/admanager_v1/entity_signals_mapping_service.rst @@ -0,0 +1,10 @@ +EntitySignalsMappingService +--------------------------------------------- + +.. automodule:: google.ads.admanager_v1.services.entity_signals_mapping_service + :members: + :inherited-members: + +.. automodule:: google.ads.admanager_v1.services.entity_signals_mapping_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/label_service.rst b/packages/google-ads-admanager/docs/admanager_v1/label_service.rst deleted file mode 100644 index f3408d1767f5..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/label_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -LabelService ------------------------------- - -.. automodule:: google.ads.admanager_v1.services.label_service - :members: - :inherited-members: - -.. 
automodule:: google.ads.admanager_v1.services.label_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/line_item_service.rst b/packages/google-ads-admanager/docs/admanager_v1/line_item_service.rst deleted file mode 100644 index 6b4388d90085..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/line_item_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -LineItemService ---------------------------------- - -.. automodule:: google.ads.admanager_v1.services.line_item_service - :members: - :inherited-members: - -.. automodule:: google.ads.admanager_v1.services.line_item_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/report_service.rst b/packages/google-ads-admanager/docs/admanager_v1/report_service.rst index 96130cad2289..a655ad73d7a3 100644 --- a/packages/google-ads-admanager/docs/admanager_v1/report_service.rst +++ b/packages/google-ads-admanager/docs/admanager_v1/report_service.rst @@ -4,3 +4,7 @@ ReportService .. automodule:: google.ads.admanager_v1.services.report_service :members: :inherited-members: + +.. automodule:: google.ads.admanager_v1.services.report_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/services_.rst b/packages/google-ads-admanager/docs/admanager_v1/services_.rst index a9b93b8a07c2..a1522b62dc40 100644 --- a/packages/google-ads-admanager/docs/admanager_v1/services_.rst +++ b/packages/google-ads-admanager/docs/admanager_v1/services_.rst @@ -3,20 +3,16 @@ Services for Google Ads Admanager v1 API .. 
toctree:: :maxdepth: 2 - ad_partner_service ad_unit_service company_service - contact_service - creative_service custom_field_service custom_targeting_key_service custom_targeting_value_service - label_service - line_item_service + entity_signals_mapping_service network_service order_service placement_service report_service role_service - team_service + taxonomy_category_service user_service diff --git a/packages/google-ads-admanager/docs/admanager_v1/taxonomy_category_service.rst b/packages/google-ads-admanager/docs/admanager_v1/taxonomy_category_service.rst new file mode 100644 index 000000000000..61f13e739e19 --- /dev/null +++ b/packages/google-ads-admanager/docs/admanager_v1/taxonomy_category_service.rst @@ -0,0 +1,10 @@ +TaxonomyCategoryService +----------------------------------------- + +.. automodule:: google.ads.admanager_v1.services.taxonomy_category_service + :members: + :inherited-members: + +.. automodule:: google.ads.admanager_v1.services.taxonomy_category_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/team_service.rst b/packages/google-ads-admanager/docs/admanager_v1/team_service.rst deleted file mode 100644 index 4d3e14c6f6c1..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/team_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -TeamService ------------------------------ - -.. automodule:: google.ads.admanager_v1.services.team_service - :members: - :inherited-members: - -.. automodule:: google.ads.admanager_v1.services.team_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/user_service.rst b/packages/google-ads-admanager/docs/admanager_v1/user_service.rst index 9bae86979749..c7be2db4394e 100644 --- a/packages/google-ads-admanager/docs/admanager_v1/user_service.rst +++ b/packages/google-ads-admanager/docs/admanager_v1/user_service.rst @@ -4,7 +4,3 @@ UserService .. 
automodule:: google.ads.admanager_v1.services.user_service :members: :inherited-members: - -.. automodule:: google.ads.admanager_v1.services.user_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/google/ads/admanager/__init__.py b/packages/google-ads-admanager/google/ads/admanager/__init__.py index 39067144e427..9672929d7e9f 100644 --- a/packages/google-ads-admanager/google/ads/admanager/__init__.py +++ b/packages/google-ads-admanager/google/ads/admanager/__init__.py @@ -18,15 +18,8 @@ __version__ = package_version.__version__ -from google.ads.admanager_v1.services.ad_partner_service.client import ( - AdPartnerServiceClient, -) from google.ads.admanager_v1.services.ad_unit_service.client import AdUnitServiceClient from google.ads.admanager_v1.services.company_service.client import CompanyServiceClient -from google.ads.admanager_v1.services.contact_service.client import ContactServiceClient -from google.ads.admanager_v1.services.creative_service.client import ( - CreativeServiceClient, -) from google.ads.admanager_v1.services.custom_field_service.client import ( CustomFieldServiceClient, ) @@ -36,9 +29,8 @@ from google.ads.admanager_v1.services.custom_targeting_value_service.client import ( CustomTargetingValueServiceClient, ) -from google.ads.admanager_v1.services.label_service.client import LabelServiceClient -from google.ads.admanager_v1.services.line_item_service.client import ( - LineItemServiceClient, +from google.ads.admanager_v1.services.entity_signals_mapping_service.client import ( + EntitySignalsMappingServiceClient, ) from google.ads.admanager_v1.services.network_service.client import NetworkServiceClient from google.ads.admanager_v1.services.order_service.client import OrderServiceClient @@ -47,76 +39,66 @@ ) from google.ads.admanager_v1.services.report_service.client import ReportServiceClient from google.ads.admanager_v1.services.role_service.client import RoleServiceClient -from 
google.ads.admanager_v1.services.team_service.client import TeamServiceClient -from google.ads.admanager_v1.services.user_service.client import UserServiceClient -from google.ads.admanager_v1.types.ad_partner_declaration import ( - AdPartnerDeclaration, - DeclarationTypeEnum, +from google.ads.admanager_v1.services.taxonomy_category_service.client import ( + TaxonomyCategoryServiceClient, ) -from google.ads.admanager_v1.types.ad_partner_service import ( - AdPartner, - GetAdPartnerRequest, - ListAdPartnersRequest, - ListAdPartnersResponse, +from google.ads.admanager_v1.services.user_service.client import UserServiceClient +from google.ads.admanager_v1.types.ad_unit_enums import ( + AdUnitStatusEnum, + SmartSizeModeEnum, + TargetWindowEnum, ) -from google.ads.admanager_v1.types.ad_unit_enums import AppliedAdsenseEnabledEnum -from google.ads.admanager_v1.types.ad_unit_service import ( +from google.ads.admanager_v1.types.ad_unit_messages import ( AdUnit, AdUnitParent, - GetAdUnitRequest, + AdUnitSize, LabelFrequencyCap, +) +from google.ads.admanager_v1.types.ad_unit_service import ( + GetAdUnitRequest, + ListAdUnitSizesRequest, + ListAdUnitSizesResponse, ListAdUnitsRequest, ListAdUnitsResponse, - SmartSizeModeEnum, - TargetWindowEnum, ) -from google.ads.admanager_v1.types.ad_unit_size import AdUnitSize from google.ads.admanager_v1.types.admanager_error import AdManagerError from google.ads.admanager_v1.types.applied_label import AppliedLabel from google.ads.admanager_v1.types.company_credit_status_enum import ( CompanyCreditStatusEnum, ) +from google.ads.admanager_v1.types.company_messages import Company from google.ads.admanager_v1.types.company_service import ( - Company, GetCompanyRequest, ListCompaniesRequest, ListCompaniesResponse, ) from google.ads.admanager_v1.types.company_type_enum import CompanyTypeEnum -from google.ads.admanager_v1.types.computed_status_enum import ComputedStatusEnum -from google.ads.admanager_v1.types.contact_service import ( - Contact, - 
GetContactRequest, - ListContactsRequest, - ListContactsResponse, -) -from google.ads.admanager_v1.types.creative_placeholder import CreativePlaceholder -from google.ads.admanager_v1.types.creative_service import ( - Creative, - GetCreativeRequest, - ListCreativesRequest, - ListCreativesResponse, -) +from google.ads.admanager_v1.types.contact_messages import Contact from google.ads.admanager_v1.types.custom_field_enums import ( CustomFieldDataTypeEnum, CustomFieldEntityTypeEnum, CustomFieldStatusEnum, CustomFieldVisibilityEnum, ) -from google.ads.admanager_v1.types.custom_field_service import ( +from google.ads.admanager_v1.types.custom_field_messages import ( CustomField, CustomFieldOption, +) +from google.ads.admanager_v1.types.custom_field_service import ( GetCustomFieldRequest, ListCustomFieldsRequest, ListCustomFieldsResponse, ) +from google.ads.admanager_v1.types.custom_field_value import CustomFieldValue from google.ads.admanager_v1.types.custom_targeting_key_enums import ( CustomTargetingKeyReportableTypeEnum, CustomTargetingKeyStatusEnum, CustomTargetingKeyTypeEnum, ) -from google.ads.admanager_v1.types.custom_targeting_key_service import ( +from google.ads.admanager_v1.types.custom_targeting_key_messages import ( CustomTargetingKey, +) +from google.ads.admanager_v1.types.custom_targeting_key_service import ( GetCustomTargetingKeyRequest, ListCustomTargetingKeysRequest, ListCustomTargetingKeysResponse, @@ -125,109 +107,114 @@ CustomTargetingValueMatchTypeEnum, CustomTargetingValueStatusEnum, ) -from google.ads.admanager_v1.types.custom_targeting_value_service import ( +from google.ads.admanager_v1.types.custom_targeting_value_messages import ( CustomTargetingValue, +) +from google.ads.admanager_v1.types.custom_targeting_value_service import ( GetCustomTargetingValueRequest, ListCustomTargetingValuesRequest, ListCustomTargetingValuesResponse, ) -from google.ads.admanager_v1.types.environment_type_enum import EnvironmentTypeEnum -from 
google.ads.admanager_v1.types.frequency_cap import FrequencyCap, TimeUnitEnum -from google.ads.admanager_v1.types.goal import Goal, GoalTypeEnum, UnitTypeEnum -from google.ads.admanager_v1.types.label_service import ( - GetLabelRequest, - Label, - ListLabelsRequest, - ListLabelsResponse, +from google.ads.admanager_v1.types.entity_signals_mapping_messages import ( + EntitySignalsMapping, ) -from google.ads.admanager_v1.types.line_item_enums import ( - CreativeRotationTypeEnum, - DeliveryRateTypeEnum, - LineItemCostTypeEnum, - LineItemDiscountTypeEnum, - LineItemTypeEnum, - ReservationStatusEnum, +from google.ads.admanager_v1.types.entity_signals_mapping_service import ( + BatchCreateEntitySignalsMappingsRequest, + BatchCreateEntitySignalsMappingsResponse, + BatchUpdateEntitySignalsMappingsRequest, + BatchUpdateEntitySignalsMappingsResponse, + CreateEntitySignalsMappingRequest, + GetEntitySignalsMappingRequest, + ListEntitySignalsMappingsRequest, + ListEntitySignalsMappingsResponse, + UpdateEntitySignalsMappingRequest, ) -from google.ads.admanager_v1.types.line_item_service import ( - GetLineItemRequest, - LineItem, - ListLineItemsRequest, - ListLineItemsResponse, +from google.ads.admanager_v1.types.environment_type_enum import EnvironmentTypeEnum +from google.ads.admanager_v1.types.frequency_cap import FrequencyCap +from google.ads.admanager_v1.types.label_messages import Label +from google.ads.admanager_v1.types.network_messages import Network +from google.ads.admanager_v1.types.network_service import ( + GetNetworkRequest, + ListNetworksRequest, + ListNetworksResponse, ) -from google.ads.admanager_v1.types.network_service import GetNetworkRequest, Network +from google.ads.admanager_v1.types.order_enums import OrderStatusEnum +from google.ads.admanager_v1.types.order_messages import Order from google.ads.admanager_v1.types.order_service import ( GetOrderRequest, ListOrdersRequest, ListOrdersResponse, - Order, ) from google.ads.admanager_v1.types.placement_enums 
import PlacementStatusEnum +from google.ads.admanager_v1.types.placement_messages import Placement from google.ads.admanager_v1.types.placement_service import ( GetPlacementRequest, ListPlacementsRequest, ListPlacementsResponse, - Placement, ) from google.ads.admanager_v1.types.report_service import ( - ExportSavedReportMetadata, - ExportSavedReportRequest, - ExportSavedReportResponse, + CreateReportRequest, + FetchReportResultRowsRequest, + FetchReportResultRowsResponse, + GetReportRequest, + ListReportsRequest, + ListReportsResponse, Report, + ReportDefinition, + RunReportMetadata, + RunReportRequest, + RunReportResponse, + Schedule, + ScheduleOptions, + UpdateReportRequest, ) +from google.ads.admanager_v1.types.role_enums import RoleStatusEnum +from google.ads.admanager_v1.types.role_messages import Role from google.ads.admanager_v1.types.role_service import ( GetRoleRequest, ListRolesRequest, ListRolesResponse, - Role, -) -from google.ads.admanager_v1.types.size import Size, SizeTypeEnum -from google.ads.admanager_v1.types.team_service import ( - GetTeamRequest, - ListTeamsRequest, - ListTeamsResponse, - Team, ) -from google.ads.admanager_v1.types.user_service import ( - GetUserRequest, - ListUsersRequest, - ListUsersResponse, - User, +from google.ads.admanager_v1.types.size import Size +from google.ads.admanager_v1.types.size_type_enum import SizeTypeEnum +from google.ads.admanager_v1.types.taxonomy_category_messages import TaxonomyCategory +from google.ads.admanager_v1.types.taxonomy_category_service import ( + GetTaxonomyCategoryRequest, + ListTaxonomyCategoriesRequest, + ListTaxonomyCategoriesResponse, ) +from google.ads.admanager_v1.types.taxonomy_type_enum import TaxonomyTypeEnum +from google.ads.admanager_v1.types.team_messages import Team +from google.ads.admanager_v1.types.time_unit_enum import TimeUnitEnum +from google.ads.admanager_v1.types.user_messages import User +from google.ads.admanager_v1.types.user_service import GetUserRequest __all__ = ( - 
"AdPartnerServiceClient", "AdUnitServiceClient", "CompanyServiceClient", - "ContactServiceClient", - "CreativeServiceClient", "CustomFieldServiceClient", "CustomTargetingKeyServiceClient", "CustomTargetingValueServiceClient", - "LabelServiceClient", - "LineItemServiceClient", + "EntitySignalsMappingServiceClient", "NetworkServiceClient", "OrderServiceClient", "PlacementServiceClient", "ReportServiceClient", "RoleServiceClient", - "TeamServiceClient", + "TaxonomyCategoryServiceClient", "UserServiceClient", - "AdPartnerDeclaration", - "DeclarationTypeEnum", - "AdPartner", - "GetAdPartnerRequest", - "ListAdPartnersRequest", - "ListAdPartnersResponse", - "AppliedAdsenseEnabledEnum", + "AdUnitStatusEnum", + "SmartSizeModeEnum", + "TargetWindowEnum", "AdUnit", "AdUnitParent", - "GetAdUnitRequest", + "AdUnitSize", "LabelFrequencyCap", + "GetAdUnitRequest", + "ListAdUnitSizesRequest", + "ListAdUnitSizesResponse", "ListAdUnitsRequest", "ListAdUnitsResponse", - "SmartSizeModeEnum", - "TargetWindowEnum", - "AdUnitSize", "AdManagerError", "AppliedLabel", "CompanyCreditStatusEnum", @@ -236,16 +223,7 @@ "ListCompaniesRequest", "ListCompaniesResponse", "CompanyTypeEnum", - "ComputedStatusEnum", "Contact", - "GetContactRequest", - "ListContactsRequest", - "ListContactsResponse", - "CreativePlaceholder", - "Creative", - "GetCreativeRequest", - "ListCreativesRequest", - "ListCreativesResponse", "CustomFieldDataTypeEnum", "CustomFieldEntityTypeEnum", "CustomFieldStatusEnum", @@ -255,6 +233,7 @@ "GetCustomFieldRequest", "ListCustomFieldsRequest", "ListCustomFieldsResponse", + "CustomFieldValue", "CustomTargetingKeyReportableTypeEnum", "CustomTargetingKeyStatusEnum", "CustomTargetingKeyTypeEnum", @@ -268,53 +247,61 @@ "GetCustomTargetingValueRequest", "ListCustomTargetingValuesRequest", "ListCustomTargetingValuesResponse", + "EntitySignalsMapping", + "BatchCreateEntitySignalsMappingsRequest", + "BatchCreateEntitySignalsMappingsResponse", + "BatchUpdateEntitySignalsMappingsRequest", + 
"BatchUpdateEntitySignalsMappingsResponse", + "CreateEntitySignalsMappingRequest", + "GetEntitySignalsMappingRequest", + "ListEntitySignalsMappingsRequest", + "ListEntitySignalsMappingsResponse", + "UpdateEntitySignalsMappingRequest", "EnvironmentTypeEnum", "FrequencyCap", - "TimeUnitEnum", - "Goal", - "GoalTypeEnum", - "UnitTypeEnum", - "GetLabelRequest", "Label", - "ListLabelsRequest", - "ListLabelsResponse", - "CreativeRotationTypeEnum", - "DeliveryRateTypeEnum", - "LineItemCostTypeEnum", - "LineItemDiscountTypeEnum", - "LineItemTypeEnum", - "ReservationStatusEnum", - "GetLineItemRequest", - "LineItem", - "ListLineItemsRequest", - "ListLineItemsResponse", - "GetNetworkRequest", "Network", + "GetNetworkRequest", + "ListNetworksRequest", + "ListNetworksResponse", + "OrderStatusEnum", + "Order", "GetOrderRequest", "ListOrdersRequest", "ListOrdersResponse", - "Order", "PlacementStatusEnum", + "Placement", "GetPlacementRequest", "ListPlacementsRequest", "ListPlacementsResponse", - "Placement", - "ExportSavedReportMetadata", - "ExportSavedReportRequest", - "ExportSavedReportResponse", + "CreateReportRequest", + "FetchReportResultRowsRequest", + "FetchReportResultRowsResponse", + "GetReportRequest", + "ListReportsRequest", + "ListReportsResponse", "Report", + "ReportDefinition", + "RunReportMetadata", + "RunReportRequest", + "RunReportResponse", + "Schedule", + "ScheduleOptions", + "UpdateReportRequest", + "RoleStatusEnum", + "Role", "GetRoleRequest", "ListRolesRequest", "ListRolesResponse", - "Role", "Size", "SizeTypeEnum", - "GetTeamRequest", - "ListTeamsRequest", - "ListTeamsResponse", + "TaxonomyCategory", + "GetTaxonomyCategoryRequest", + "ListTaxonomyCategoriesRequest", + "ListTaxonomyCategoriesResponse", + "TaxonomyTypeEnum", "Team", - "GetUserRequest", - "ListUsersRequest", - "ListUsersResponse", + "TimeUnitEnum", "User", + "GetUserRequest", ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/__init__.py 
b/packages/google-ads-admanager/google/ads/admanager_v1/__init__.py index b13eac5f2835..e2d73bf488ba 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/__init__.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/__init__.py @@ -18,86 +18,59 @@ __version__ = package_version.__version__ -from .services.ad_partner_service import AdPartnerServiceClient from .services.ad_unit_service import AdUnitServiceClient from .services.company_service import CompanyServiceClient -from .services.contact_service import ContactServiceClient -from .services.creative_service import CreativeServiceClient from .services.custom_field_service import CustomFieldServiceClient from .services.custom_targeting_key_service import CustomTargetingKeyServiceClient from .services.custom_targeting_value_service import CustomTargetingValueServiceClient -from .services.label_service import LabelServiceClient -from .services.line_item_service import LineItemServiceClient +from .services.entity_signals_mapping_service import EntitySignalsMappingServiceClient from .services.network_service import NetworkServiceClient from .services.order_service import OrderServiceClient from .services.placement_service import PlacementServiceClient from .services.report_service import ReportServiceClient from .services.role_service import RoleServiceClient -from .services.team_service import TeamServiceClient +from .services.taxonomy_category_service import TaxonomyCategoryServiceClient from .services.user_service import UserServiceClient -from .types.ad_partner_declaration import AdPartnerDeclaration, DeclarationTypeEnum -from .types.ad_partner_service import ( - AdPartner, - GetAdPartnerRequest, - ListAdPartnersRequest, - ListAdPartnersResponse, -) -from .types.ad_unit_enums import AppliedAdsenseEnabledEnum +from .types.ad_unit_enums import AdUnitStatusEnum, SmartSizeModeEnum, TargetWindowEnum +from .types.ad_unit_messages import AdUnit, AdUnitParent, AdUnitSize, LabelFrequencyCap from 
.types.ad_unit_service import ( - AdUnit, - AdUnitParent, GetAdUnitRequest, - LabelFrequencyCap, + ListAdUnitSizesRequest, + ListAdUnitSizesResponse, ListAdUnitsRequest, ListAdUnitsResponse, - SmartSizeModeEnum, - TargetWindowEnum, ) -from .types.ad_unit_size import AdUnitSize from .types.admanager_error import AdManagerError from .types.applied_label import AppliedLabel from .types.company_credit_status_enum import CompanyCreditStatusEnum +from .types.company_messages import Company from .types.company_service import ( - Company, GetCompanyRequest, ListCompaniesRequest, ListCompaniesResponse, ) from .types.company_type_enum import CompanyTypeEnum -from .types.computed_status_enum import ComputedStatusEnum -from .types.contact_service import ( - Contact, - GetContactRequest, - ListContactsRequest, - ListContactsResponse, -) -from .types.creative_placeholder import CreativePlaceholder -from .types.creative_service import ( - Creative, - GetCreativeRequest, - ListCreativesRequest, - ListCreativesResponse, -) +from .types.contact_messages import Contact from .types.custom_field_enums import ( CustomFieldDataTypeEnum, CustomFieldEntityTypeEnum, CustomFieldStatusEnum, CustomFieldVisibilityEnum, ) +from .types.custom_field_messages import CustomField, CustomFieldOption from .types.custom_field_service import ( - CustomField, - CustomFieldOption, GetCustomFieldRequest, ListCustomFieldsRequest, ListCustomFieldsResponse, ) +from .types.custom_field_value import CustomFieldValue from .types.custom_targeting_key_enums import ( CustomTargetingKeyReportableTypeEnum, CustomTargetingKeyStatusEnum, CustomTargetingKeyTypeEnum, ) +from .types.custom_targeting_key_messages import CustomTargetingKey from .types.custom_targeting_key_service import ( - CustomTargetingKey, GetCustomTargetingKeyRequest, ListCustomTargetingKeysRequest, ListCustomTargetingKeysResponse, @@ -106,103 +79,102 @@ CustomTargetingValueMatchTypeEnum, CustomTargetingValueStatusEnum, ) +from 
.types.custom_targeting_value_messages import CustomTargetingValue from .types.custom_targeting_value_service import ( - CustomTargetingValue, GetCustomTargetingValueRequest, ListCustomTargetingValuesRequest, ListCustomTargetingValuesResponse, ) -from .types.environment_type_enum import EnvironmentTypeEnum -from .types.frequency_cap import FrequencyCap, TimeUnitEnum -from .types.goal import Goal, GoalTypeEnum, UnitTypeEnum -from .types.label_service import ( - GetLabelRequest, - Label, - ListLabelsRequest, - ListLabelsResponse, -) -from .types.line_item_enums import ( - CreativeRotationTypeEnum, - DeliveryRateTypeEnum, - LineItemCostTypeEnum, - LineItemDiscountTypeEnum, - LineItemTypeEnum, - ReservationStatusEnum, +from .types.entity_signals_mapping_messages import EntitySignalsMapping +from .types.entity_signals_mapping_service import ( + BatchCreateEntitySignalsMappingsRequest, + BatchCreateEntitySignalsMappingsResponse, + BatchUpdateEntitySignalsMappingsRequest, + BatchUpdateEntitySignalsMappingsResponse, + CreateEntitySignalsMappingRequest, + GetEntitySignalsMappingRequest, + ListEntitySignalsMappingsRequest, + ListEntitySignalsMappingsResponse, + UpdateEntitySignalsMappingRequest, ) -from .types.line_item_service import ( - GetLineItemRequest, - LineItem, - ListLineItemsRequest, - ListLineItemsResponse, -) -from .types.network_service import GetNetworkRequest, Network -from .types.order_service import ( - GetOrderRequest, - ListOrdersRequest, - ListOrdersResponse, - Order, +from .types.environment_type_enum import EnvironmentTypeEnum +from .types.frequency_cap import FrequencyCap +from .types.label_messages import Label +from .types.network_messages import Network +from .types.network_service import ( + GetNetworkRequest, + ListNetworksRequest, + ListNetworksResponse, ) +from .types.order_enums import OrderStatusEnum +from .types.order_messages import Order +from .types.order_service import GetOrderRequest, ListOrdersRequest, ListOrdersResponse from 
.types.placement_enums import PlacementStatusEnum +from .types.placement_messages import Placement from .types.placement_service import ( GetPlacementRequest, ListPlacementsRequest, ListPlacementsResponse, - Placement, ) from .types.report_service import ( - ExportSavedReportMetadata, - ExportSavedReportRequest, - ExportSavedReportResponse, + CreateReportRequest, + FetchReportResultRowsRequest, + FetchReportResultRowsResponse, + GetReportRequest, + ListReportsRequest, + ListReportsResponse, Report, + ReportDefinition, + RunReportMetadata, + RunReportRequest, + RunReportResponse, + Schedule, + ScheduleOptions, + UpdateReportRequest, ) -from .types.role_service import ( - GetRoleRequest, - ListRolesRequest, - ListRolesResponse, - Role, -) -from .types.size import Size, SizeTypeEnum -from .types.team_service import ( - GetTeamRequest, - ListTeamsRequest, - ListTeamsResponse, - Team, -) -from .types.user_service import ( - GetUserRequest, - ListUsersRequest, - ListUsersResponse, - User, +from .types.role_enums import RoleStatusEnum +from .types.role_messages import Role +from .types.role_service import GetRoleRequest, ListRolesRequest, ListRolesResponse +from .types.size import Size +from .types.size_type_enum import SizeTypeEnum +from .types.taxonomy_category_messages import TaxonomyCategory +from .types.taxonomy_category_service import ( + GetTaxonomyCategoryRequest, + ListTaxonomyCategoriesRequest, + ListTaxonomyCategoriesResponse, ) +from .types.taxonomy_type_enum import TaxonomyTypeEnum +from .types.team_messages import Team +from .types.time_unit_enum import TimeUnitEnum +from .types.user_messages import User +from .types.user_service import GetUserRequest __all__ = ( "AdManagerError", - "AdPartner", - "AdPartnerDeclaration", - "AdPartnerServiceClient", "AdUnit", "AdUnitParent", "AdUnitServiceClient", "AdUnitSize", - "AppliedAdsenseEnabledEnum", + "AdUnitStatusEnum", "AppliedLabel", + "BatchCreateEntitySignalsMappingsRequest", + 
"BatchCreateEntitySignalsMappingsResponse", + "BatchUpdateEntitySignalsMappingsRequest", + "BatchUpdateEntitySignalsMappingsResponse", "Company", "CompanyCreditStatusEnum", "CompanyServiceClient", "CompanyTypeEnum", - "ComputedStatusEnum", "Contact", - "ContactServiceClient", - "Creative", - "CreativePlaceholder", - "CreativeRotationTypeEnum", - "CreativeServiceClient", + "CreateEntitySignalsMappingRequest", + "CreateReportRequest", "CustomField", "CustomFieldDataTypeEnum", "CustomFieldEntityTypeEnum", "CustomFieldOption", "CustomFieldServiceClient", "CustomFieldStatusEnum", + "CustomFieldValue", "CustomFieldVisibilityEnum", "CustomTargetingKey", "CustomTargetingKeyReportableTypeEnum", @@ -213,89 +185,83 @@ "CustomTargetingValueMatchTypeEnum", "CustomTargetingValueServiceClient", "CustomTargetingValueStatusEnum", - "DeclarationTypeEnum", - "DeliveryRateTypeEnum", + "EntitySignalsMapping", + "EntitySignalsMappingServiceClient", "EnvironmentTypeEnum", - "ExportSavedReportMetadata", - "ExportSavedReportRequest", - "ExportSavedReportResponse", + "FetchReportResultRowsRequest", + "FetchReportResultRowsResponse", "FrequencyCap", - "GetAdPartnerRequest", "GetAdUnitRequest", "GetCompanyRequest", - "GetContactRequest", - "GetCreativeRequest", "GetCustomFieldRequest", "GetCustomTargetingKeyRequest", "GetCustomTargetingValueRequest", - "GetLabelRequest", - "GetLineItemRequest", + "GetEntitySignalsMappingRequest", "GetNetworkRequest", "GetOrderRequest", "GetPlacementRequest", + "GetReportRequest", "GetRoleRequest", - "GetTeamRequest", + "GetTaxonomyCategoryRequest", "GetUserRequest", - "Goal", - "GoalTypeEnum", "Label", "LabelFrequencyCap", - "LabelServiceClient", - "LineItem", - "LineItemCostTypeEnum", - "LineItemDiscountTypeEnum", - "LineItemServiceClient", - "LineItemTypeEnum", - "ListAdPartnersRequest", - "ListAdPartnersResponse", + "ListAdUnitSizesRequest", + "ListAdUnitSizesResponse", "ListAdUnitsRequest", "ListAdUnitsResponse", "ListCompaniesRequest", 
"ListCompaniesResponse", - "ListContactsRequest", - "ListContactsResponse", - "ListCreativesRequest", - "ListCreativesResponse", "ListCustomFieldsRequest", "ListCustomFieldsResponse", "ListCustomTargetingKeysRequest", "ListCustomTargetingKeysResponse", "ListCustomTargetingValuesRequest", "ListCustomTargetingValuesResponse", - "ListLabelsRequest", - "ListLabelsResponse", - "ListLineItemsRequest", - "ListLineItemsResponse", + "ListEntitySignalsMappingsRequest", + "ListEntitySignalsMappingsResponse", + "ListNetworksRequest", + "ListNetworksResponse", "ListOrdersRequest", "ListOrdersResponse", "ListPlacementsRequest", "ListPlacementsResponse", + "ListReportsRequest", + "ListReportsResponse", "ListRolesRequest", "ListRolesResponse", - "ListTeamsRequest", - "ListTeamsResponse", - "ListUsersRequest", - "ListUsersResponse", + "ListTaxonomyCategoriesRequest", + "ListTaxonomyCategoriesResponse", "Network", "NetworkServiceClient", "Order", "OrderServiceClient", + "OrderStatusEnum", "Placement", "PlacementServiceClient", "PlacementStatusEnum", "Report", + "ReportDefinition", "ReportServiceClient", - "ReservationStatusEnum", "Role", "RoleServiceClient", + "RoleStatusEnum", + "RunReportMetadata", + "RunReportRequest", + "RunReportResponse", + "Schedule", + "ScheduleOptions", "Size", "SizeTypeEnum", "SmartSizeModeEnum", "TargetWindowEnum", + "TaxonomyCategory", + "TaxonomyCategoryServiceClient", + "TaxonomyTypeEnum", "Team", - "TeamServiceClient", "TimeUnitEnum", - "UnitTypeEnum", + "UpdateEntitySignalsMappingRequest", + "UpdateReportRequest", "User", "UserServiceClient", ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/gapic_metadata.json b/packages/google-ads-admanager/google/ads/admanager_v1/gapic_metadata.json index 67680096a5d9..aa173a3cf11e 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/gapic_metadata.json +++ b/packages/google-ads-admanager/google/ads/admanager_v1/gapic_metadata.json @@ -5,25 +5,6 @@ "protoPackage": 
"google.ads.admanager.v1", "schema": "1.0", "services": { - "AdPartnerService": { - "clients": { - "rest": { - "libraryClient": "AdPartnerServiceClient", - "rpcs": { - "GetAdPartner": { - "methods": [ - "get_ad_partner" - ] - }, - "ListAdPartners": { - "methods": [ - "list_ad_partners" - ] - } - } - } - } - }, "AdUnitService": { "clients": { "rest": { @@ -34,6 +15,11 @@ "get_ad_unit" ] }, + "ListAdUnitSizes": { + "methods": [ + "list_ad_unit_sizes" + ] + }, "ListAdUnits": { "methods": [ "list_ad_units" @@ -62,44 +48,6 @@ } } }, - "ContactService": { - "clients": { - "rest": { - "libraryClient": "ContactServiceClient", - "rpcs": { - "GetContact": { - "methods": [ - "get_contact" - ] - }, - "ListContacts": { - "methods": [ - "list_contacts" - ] - } - } - } - } - }, - "CreativeService": { - "clients": { - "rest": { - "libraryClient": "CreativeServiceClient", - "rpcs": { - "GetCreative": { - "methods": [ - "get_creative" - ] - }, - "ListCreatives": { - "methods": [ - "list_creatives" - ] - } - } - } - } - }, "CustomFieldService": { "clients": { "rest": { @@ -157,38 +105,39 @@ } } }, - "LabelService": { + "EntitySignalsMappingService": { "clients": { "rest": { - "libraryClient": "LabelServiceClient", + "libraryClient": "EntitySignalsMappingServiceClient", "rpcs": { - "GetLabel": { + "BatchCreateEntitySignalsMappings": { "methods": [ - "get_label" + "batch_create_entity_signals_mappings" ] }, - "ListLabels": { + "BatchUpdateEntitySignalsMappings": { "methods": [ - "list_labels" + "batch_update_entity_signals_mappings" ] - } - } - } - } - }, - "LineItemService": { - "clients": { - "rest": { - "libraryClient": "LineItemServiceClient", - "rpcs": { - "GetLineItem": { + }, + "CreateEntitySignalsMapping": { + "methods": [ + "create_entity_signals_mapping" + ] + }, + "GetEntitySignalsMapping": { "methods": [ - "get_line_item" + "get_entity_signals_mapping" ] }, - "ListLineItems": { + "ListEntitySignalsMappings": { "methods": [ - "list_line_items" + 
"list_entity_signals_mappings" + ] + }, + "UpdateEntitySignalsMapping": { + "methods": [ + "update_entity_signals_mapping" ] } } @@ -204,6 +153,11 @@ "methods": [ "get_network" ] + }, + "ListNetworks": { + "methods": [ + "list_networks" + ] } } } @@ -252,9 +206,34 @@ "rest": { "libraryClient": "ReportServiceClient", "rpcs": { - "ExportSavedReport": { + "CreateReport": { + "methods": [ + "create_report" + ] + }, + "FetchReportResultRows": { + "methods": [ + "fetch_report_result_rows" + ] + }, + "GetReport": { + "methods": [ + "get_report" + ] + }, + "ListReports": { "methods": [ - "export_saved_report" + "list_reports" + ] + }, + "RunReport": { + "methods": [ + "run_report" + ] + }, + "UpdateReport": { + "methods": [ + "update_report" ] } } @@ -280,19 +259,19 @@ } } }, - "TeamService": { + "TaxonomyCategoryService": { "clients": { "rest": { - "libraryClient": "TeamServiceClient", + "libraryClient": "TaxonomyCategoryServiceClient", "rpcs": { - "GetTeam": { + "GetTaxonomyCategory": { "methods": [ - "get_team" + "get_taxonomy_category" ] }, - "ListTeams": { + "ListTaxonomyCategories": { "methods": [ - "list_teams" + "list_taxonomy_categories" ] } } @@ -308,11 +287,6 @@ "methods": [ "get_user" ] - }, - "ListUsers": { - "methods": [ - "list_users" - ] } } } diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/__init__.py deleted file mode 100644 index 7a88b4ec84e4..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import AdPartnerServiceTransport -from .rest import AdPartnerServiceRestInterceptor, AdPartnerServiceRestTransport - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[AdPartnerServiceTransport]] -_transport_registry["rest"] = AdPartnerServiceRestTransport - -__all__ = ( - "AdPartnerServiceTransport", - "AdPartnerServiceRestTransport", - "AdPartnerServiceRestInterceptor", -) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py index f1200c52e05a..c8c6cb4564df 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py @@ -55,8 +55,8 @@ from google.ads.admanager_v1.services.ad_unit_service import pagers from google.ads.admanager_v1.types import ( ad_unit_enums, + ad_unit_messages, ad_unit_service, - ad_unit_size, applied_label, ) @@ -732,7 +732,7 @@ def get_ad_unit( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> ad_unit_service.AdUnit: + ) -> ad_unit_messages.AdUnit: r"""API to retrieve an AdUnit object. .. code-block:: python @@ -942,6 +942,124 @@ def sample_list_ad_units(): # Done; return the response. 
return response + def list_ad_unit_sizes( + self, + request: Optional[Union[ad_unit_service.ListAdUnitSizesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAdUnitSizesPager: + r"""API to retrieve a list of AdUnitSize objects. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_list_ad_unit_sizes(): + # Create a client + client = admanager_v1.AdUnitServiceClient() + + # Initialize request argument(s) + request = admanager_v1.ListAdUnitSizesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_ad_unit_sizes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.ListAdUnitSizesRequest, dict]): + The request object. Request object for ListAdUnitSizes + method. + parent (str): + Required. The parent, which owns this collection of + AdUnitSizes. Format: ``networks/{network_code}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ads.admanager_v1.services.ad_unit_service.pagers.ListAdUnitSizesPager: + Response object for + ListAdUnitSizesRequest containing + matching AdUnitSizes. Iterating over + this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, ad_unit_service.ListAdUnitSizesRequest): + request = ad_unit_service.ListAdUnitSizesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_ad_unit_sizes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAdUnitSizesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "AdUnitServiceClient": return self diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/pagers.py index 2ae957b1dea0..ebf38bec7995 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/pagers.py @@ -38,7 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import ad_unit_service +from google.ads.admanager_v1.types import ad_unit_messages, ad_unit_service class ListAdUnitsPager: @@ -107,9 +107,83 @@ def pages(self) -> Iterator[ad_unit_service.ListAdUnitsResponse]: ) yield self._response - def __iter__(self) -> Iterator[ad_unit_service.AdUnit]: + def __iter__(self) -> Iterator[ad_unit_messages.AdUnit]: for page in self.pages: yield from page.ad_units def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAdUnitSizesPager: + """A pager for iterating through ``list_ad_unit_sizes`` requests. + + This class thinly wraps an initial + :class:`google.ads.admanager_v1.types.ListAdUnitSizesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``ad_unit_sizes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAdUnitSizes`` requests and continue to iterate + through the ``ad_unit_sizes`` field on the + corresponding responses. + + All the usual :class:`google.ads.admanager_v1.types.ListAdUnitSizesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., ad_unit_service.ListAdUnitSizesResponse], + request: ad_unit_service.ListAdUnitSizesRequest, + response: ad_unit_service.ListAdUnitSizesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ads.admanager_v1.types.ListAdUnitSizesRequest): + The initial request object. + response (google.ads.admanager_v1.types.ListAdUnitSizesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = ad_unit_service.ListAdUnitSizesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[ad_unit_service.ListAdUnitSizesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[ad_unit_messages.AdUnitSize]: + for page in self.pages: + yield from page.ad_unit_sizes + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/base.py 
b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/base.py index 948cad87abb1..7852b164a55c 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import ad_unit_service +from google.ads.admanager_v1.types import ad_unit_messages, ad_unit_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -139,6 +139,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_ad_unit_sizes: gapic_v1.method.wrap_method( + self.list_ad_unit_sizes, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -155,7 +160,7 @@ def get_ad_unit( self, ) -> Callable[ [ad_unit_service.GetAdUnitRequest], - Union[ad_unit_service.AdUnit, Awaitable[ad_unit_service.AdUnit]], + Union[ad_unit_messages.AdUnit, Awaitable[ad_unit_messages.AdUnit]], ]: raise NotImplementedError() @@ -171,6 +176,18 @@ def list_ad_units( ]: raise NotImplementedError() + @property + def list_ad_unit_sizes( + self, + ) -> Callable[ + [ad_unit_service.ListAdUnitSizesRequest], + Union[ + ad_unit_service.ListAdUnitSizesResponse, + Awaitable[ad_unit_service.ListAdUnitSizesResponse], + ], + ]: + raise NotImplementedError() + @property def get_operation( self, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py index c6dd9d86e533..2c1ecebf5b66 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py +++ 
b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import ad_unit_service +from google.ads.admanager_v1.types import ad_unit_messages, ad_unit_service from .base import AdUnitServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -81,6 +81,14 @@ def post_list_ad_units(self, response): logging.log(f"Received response: {response}") return response + def pre_list_ad_unit_sizes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_ad_unit_sizes(self, response): + logging.log(f"Received response: {response}") + return response + transport = AdUnitServiceRestTransport(interceptor=MyCustomAdUnitServiceInterceptor()) client = AdUnitServiceClient(transport=transport) @@ -100,8 +108,8 @@ def pre_get_ad_unit( return request, metadata def post_get_ad_unit( - self, response: ad_unit_service.AdUnit - ) -> ad_unit_service.AdUnit: + self, response: ad_unit_messages.AdUnit + ) -> ad_unit_messages.AdUnit: """Post-rpc interceptor for get_ad_unit Override in a subclass to manipulate the response @@ -133,6 +141,29 @@ def post_list_ad_units( """ return response + def pre_list_ad_unit_sizes( + self, + request: ad_unit_service.ListAdUnitSizesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ad_unit_service.ListAdUnitSizesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_ad_unit_sizes + + Override in a subclass to manipulate the request or metadata + before they are sent to the AdUnitService server. 
+ """ + return request, metadata + + def post_list_ad_unit_sizes( + self, response: ad_unit_service.ListAdUnitSizesResponse + ) -> ad_unit_service.ListAdUnitSizesResponse: + """Post-rpc interceptor for list_ad_unit_sizes + + Override in a subclass to manipulate the response + after it is returned by the AdUnitService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -274,7 +305,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> ad_unit_service.AdUnit: + ) -> ad_unit_messages.AdUnit: r"""Call the get ad unit method over HTTP. Args: @@ -287,7 +318,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.ad_unit_service.AdUnit: + ~.ad_unit_messages.AdUnit: The AdUnit resource. """ @@ -331,8 +362,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = ad_unit_service.AdUnit() - pb_resp = ad_unit_service.AdUnit.pb(resp) + resp = ad_unit_messages.AdUnit() + pb_resp = ad_unit_messages.AdUnit.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_ad_unit(resp) @@ -427,10 +458,101 @@ def __call__( resp = self._interceptor.post_list_ad_units(resp) return resp + class _ListAdUnitSizes(AdUnitServiceRestStub): + def __hash__(self): + return hash("ListAdUnitSizes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: ad_unit_service.ListAdUnitSizesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> ad_unit_service.ListAdUnitSizesResponse: + r"""Call 
the list ad unit sizes method over HTTP. + + Args: + request (~.ad_unit_service.ListAdUnitSizesRequest): + The request object. Request object for ListAdUnitSizes + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.ad_unit_service.ListAdUnitSizesResponse: + Response object for + ListAdUnitSizesRequest containing + matching AdUnitSizes. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=networks/*}/adUnitSizes", + }, + ] + request, metadata = self._interceptor.pre_list_ad_unit_sizes( + request, metadata + ) + pb_request = ad_unit_service.ListAdUnitSizesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ad_unit_service.ListAdUnitSizesResponse() + pb_resp = ad_unit_service.ListAdUnitSizesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_ad_unit_sizes(resp) + return resp + @property def get_ad_unit( self, - ) -> Callable[[ad_unit_service.GetAdUnitRequest], ad_unit_service.AdUnit]: + ) -> Callable[[ad_unit_service.GetAdUnitRequest], ad_unit_messages.AdUnit]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast return self._GetAdUnit(self._session, self._host, self._interceptor) # type: ignore @@ -445,6 +567,17 @@ def list_ad_units( # In C++ this would require a dynamic_cast return self._ListAdUnits(self._session, self._host, self._interceptor) # type: ignore + @property + def list_ad_unit_sizes( + self, + ) -> Callable[ + [ad_unit_service.ListAdUnitSizesRequest], + ad_unit_service.ListAdUnitSizesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListAdUnitSizes(self._session, self._host, self._interceptor) # type: ignore + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -476,11 +609,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py index a1bc3d1c6eab..85b49f138017 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py @@ -49,11 +49,13 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from google.ads.admanager_v1.services.company_service import pagers from google.ads.admanager_v1.types import ( applied_label, company_credit_status_enum, + company_messages, company_service, company_type_enum, ) @@ -753,7 +755,7 @@ def get_company( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> company_service.Company: + ) -> company_messages.Company: r"""API to retrieve a ``Company`` object. .. code-block:: python @@ -902,7 +904,7 @@ def sample_list_companies(): Returns: google.ads.admanager_v1.services.company_service.pagers.ListCompaniesPager: Response object for ListCompaniesRequest containing matching Company - resources. 
+ objects. Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/pagers.py index 8dd003e78650..7a1c65b16259 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/pagers.py @@ -38,7 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import company_service +from google.ads.admanager_v1.types import company_messages, company_service class ListCompaniesPager: @@ -107,7 +107,7 @@ def pages(self) -> Iterator[company_service.ListCompaniesResponse]: ) yield self._response - def __iter__(self) -> Iterator[company_service.Company]: + def __iter__(self) -> Iterator[company_messages.Company]: for page in self.pages: yield from page.companies diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/base.py index 0415f3d70be7..3304a05b29c2 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import company_service +from google.ads.admanager_v1.types import company_messages, company_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -155,7 +155,7 @@ def 
get_company( self, ) -> Callable[ [company_service.GetCompanyRequest], - Union[company_service.Company, Awaitable[company_service.Company]], + Union[company_messages.Company, Awaitable[company_messages.Company]], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py index 3692c88b6fde..604ec04faf5a 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import company_service +from google.ads.admanager_v1.types import company_messages, company_service from .base import CompanyServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -100,8 +100,8 @@ def pre_get_company( return request, metadata def post_get_company( - self, response: company_service.Company - ) -> company_service.Company: + self, response: company_messages.Company + ) -> company_messages.Company: """Post-rpc interceptor for get_company Override in a subclass to manipulate the response @@ -274,7 +274,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> company_service.Company: + ) -> company_messages.Company: r"""Call the get company method over HTTP. Args: @@ -287,7 +287,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.company_service.Company: + ~.company_messages.Company: The ``Company`` resource. 
""" @@ -331,8 +331,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = company_service.Company() - pb_resp = company_service.Company.pb(resp) + resp = company_messages.Company() + pb_resp = company_messages.Company.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_company(resp) @@ -374,7 +374,7 @@ def __call__( Returns: ~.company_service.ListCompaniesResponse: Response object for ``ListCompaniesRequest`` containing - matching ``Company`` resources. + matching ``Company`` objects. """ @@ -428,7 +428,7 @@ def __call__( @property def get_company( self, - ) -> Callable[[company_service.GetCompanyRequest], company_service.Company]: + ) -> Callable[[company_service.GetCompanyRequest], company_messages.Company]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast return self._GetCompany(self._session, self._host, self._interceptor) # type: ignore @@ -474,11 +474,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/__init__.py deleted file mode 100644 index 20eee0424097..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import ContactServiceClient - -__all__ = ("ContactServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/client.py deleted file mode 100644 index 46d892852e64..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/client.py +++ /dev/null @@ -1,986 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import ( - Callable, - Dict, - Mapping, - MutableMapping, - MutableSequence, - Optional, - Sequence, - Tuple, - Type, - Union, - cast, -) -import warnings - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.ads.admanager_v1 import gapic_version as package_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -from google.longrunning import operations_pb2 # type: ignore - -from google.ads.admanager_v1.services.contact_service import pagers -from google.ads.admanager_v1.types import contact_service - -from .transports.base import DEFAULT_CLIENT_INFO, ContactServiceTransport -from .transports.rest import ContactServiceRestTransport - - -class ContactServiceClientMeta(type): - """Metaclass for the ContactService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - - _transport_registry = ( - OrderedDict() - ) # type: Dict[str, Type[ContactServiceTransport]] - _transport_registry["rest"] = ContactServiceRestTransport - - def get_transport_class( - cls, - label: Optional[str] = None, - ) -> Type[ContactServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. 
If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class ContactServiceClient(metaclass=ContactServiceClientMeta): - """Provides methods for handling Contact objects.""" - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "admanager.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "admanager.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. 
- kwargs: Additional arguments to pass to the constructor. - - Returns: - ContactServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ContactServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> ContactServiceTransport: - """Returns the transport used by the client instance. - - Returns: - ContactServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def contact_path( - network_code: str, - contact: str, - ) -> str: - """Returns a fully-qualified contact string.""" - return "networks/{network_code}/contacts/{contact}".format( - network_code=network_code, - contact=contact, - ) - - @staticmethod - def parse_contact_path(path: str) -> Dict[str, str]: - """Parses a contact path into its component segments.""" - m = re.match( - r"^networks/(?P.+?)/contacts/(?P.+?)$", path - ) - return m.groupdict() if m else {} - - @staticmethod - def network_path( - network_code: str, - ) -> str: - """Returns a fully-qualified network string.""" - return "networks/{network_code}".format( - network_code=network_code, - ) - - @staticmethod - def parse_network_path(path: str) -> Dict[str, str]: - """Parses a network path into its component segments.""" - m = re.match(r"^networks/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path( - billing_account: str, - ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str, str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path( - folder: str, - ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format( - folder=folder, - ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str, str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path( - organization: str, - ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format( - 
organization=organization, - ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str, str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path( - project: str, - ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format( - project=project, - ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str, str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path( - project: str, - location: str, - ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str, str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source( - cls, client_options: Optional[client_options_lib.ClientOptions] = None - ): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn( - "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning, - ) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError( - "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. 
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or ( - use_mtls_endpoint == "auto" and client_cert_source - ): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv( - "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" - ).lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError( - "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. 
- """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint( - api_override, client_cert_source, universe_domain, use_mtls_endpoint - ): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or ( - use_mtls_endpoint == "auto" and client_cert_source - ): - _default_universe = ContactServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError( - f"mTLS is not supported in any universe other than {_default_universe}." - ) - api_endpoint = ContactServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = ContactServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=universe_domain - ) - return api_endpoint - - @staticmethod - def _get_universe_domain( - client_universe_domain: Optional[str], universe_domain_env: Optional[str] - ) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. 
- - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = ContactServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ContactServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. 
- """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ContactServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__( - self, - *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[ - Union[str, ContactServiceTransport, Callable[..., ContactServiceTransport]] - ] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the contact service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,ContactServiceTransport,Callable[..., ContactServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the ContactServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. 
The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast( - client_options_lib.ClientOptions, self._client_options - ) - - universe_domain_opt = getattr(self._client_options, "universe_domain", None) - - ( - self._use_client_cert, - self._use_mtls_endpoint, - self._universe_domain_env, - ) = ContactServiceClient._read_environment_variables() - self._client_cert_source = ContactServiceClient._get_client_cert_source( - self._client_options.client_cert_source, self._use_client_cert - ) - self._universe_domain = ContactServiceClient._get_universe_domain( - universe_domain_opt, self._universe_domain_env - ) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError( - "client_options.api_key and credentials are mutually exclusive" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, ContactServiceTransport) - if transport_provided: - # transport is a ContactServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError( - "When providing a transport instance, " - "provide its credentials directly." - ) - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(ContactServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = ( - self._api_endpoint - or ContactServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint, - ) - ) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr( - google.auth._default, "get_api_key_credentials" - ): - credentials = google.auth._default.get_api_key_credentials( - api_key_value - ) - - transport_init: Union[ - Type[ContactServiceTransport], Callable[..., ContactServiceTransport] - ] = ( - ContactServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., ContactServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - def get_contact( - self, - request: Optional[Union[contact_service.GetContactRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> contact_service.Contact: - r"""API to retrieve a Contact object. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.ads import admanager_v1 - - def sample_get_contact(): - # Create a client - client = admanager_v1.ContactServiceClient() - - # Initialize request argument(s) - request = admanager_v1.GetContactRequest( - name="name_value", - ) - - # Make the request - response = client.get_contact(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.ads.admanager_v1.types.GetContactRequest, dict]): - The request object. Request object for GetContact method. - name (str): - Required. The resource name of the Contact. Format: - ``networks/{network_code}/contacts/{contact_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.ads.admanager_v1.types.Contact: - The Contact resource. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, contact_service.GetContactRequest): - request = contact_service.GetContactRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_contact] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_contacts( - self, - request: Optional[Union[contact_service.ListContactsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListContactsPager: - r"""API to retrieve a list of Contact objects. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.ads import admanager_v1 - - def sample_list_contacts(): - # Create a client - client = admanager_v1.ContactServiceClient() - - # Initialize request argument(s) - request = admanager_v1.ListContactsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_contacts(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.ads.admanager_v1.types.ListContactsRequest, dict]): - The request object. 
Request object for ListContacts - method. - parent (str): - Required. The parent, which owns this collection of - Contacts. Format: ``networks/{network_code}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.ads.admanager_v1.services.contact_service.pagers.ListContactsPager: - Response object for - ListContactsRequest containing matching - Contact resources. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, contact_service.ListContactsRequest): - request = contact_service.ListContactsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_contacts] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListContactsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "ContactServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - - -__all__ = ("ContactServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/pagers.py deleted file mode 100644 index 30f2279d1f01..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/pagers.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from typing import ( - Any, - AsyncIterator, - Awaitable, - Callable, - Iterator, - Optional, - Sequence, - Tuple, - Union, -) - -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[ - retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None - ] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.ads.admanager_v1.types import contact_service - - -class ListContactsPager: - """A pager for iterating through ``list_contacts`` requests. - - This class thinly wraps an initial - :class:`google.ads.admanager_v1.types.ListContactsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``contacts`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListContacts`` requests and continue to iterate - through the ``contacts`` field on the - corresponding responses. - - All the usual :class:`google.ads.admanager_v1.types.ListContactsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., contact_service.ListContactsResponse], - request: contact_service.ListContactsRequest, - response: contact_service.ListContactsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.ads.admanager_v1.types.ListContactsRequest): - The initial request object. - response (google.ads.admanager_v1.types.ListContactsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = contact_service.ListContactsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[contact_service.ListContactsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __iter__(self) -> Iterator[contact_service.Contact]: - for page in self.pages: - yield from page.contacts - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/__init__.py deleted file mode 100644 index 4dde7a60bd0f..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import ContactServiceTransport -from .rest import ContactServiceRestInterceptor, ContactServiceRestTransport - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[ContactServiceTransport]] -_transport_registry["rest"] = ContactServiceRestTransport - -__all__ = ( - "ContactServiceTransport", - "ContactServiceRestTransport", - "ContactServiceRestInterceptor", -) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/base.py deleted file mode 100644 index c6cbbecff076..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/base.py +++ /dev/null @@ -1,188 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import contact_service - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - - -class ContactServiceTransport(abc.ABC): - """Abstract transport class for ContactService.""" - - AUTH_SCOPES = () - - DEFAULT_HOST: str = "admanager.googleapis.com" - - def __init__( - self, - *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'admanager.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. 
- quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs( - "'credentials_file' and 'credentials' are mutually exclusive" - ) - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, **scopes_kwargs, quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default( - **scopes_kwargs, quota_project_id=quota_project_id - ) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience( - api_audience if api_audience else host - ) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if ( - always_use_jwt_access - and isinstance(credentials, service_account.Credentials) - and hasattr(service_account.Credentials, "with_always_use_jwt_access") - ): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
- if ":" not in host: - host += ":443" - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.get_contact: gapic_v1.method.wrap_method( - self.get_contact, - default_timeout=None, - client_info=client_info, - ), - self.list_contacts: gapic_v1.method.wrap_method( - self.list_contacts, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def get_contact( - self, - ) -> Callable[ - [contact_service.GetContactRequest], - Union[contact_service.Contact, Awaitable[contact_service.Contact]], - ]: - raise NotImplementedError() - - @property - def list_contacts( - self, - ) -> Callable[ - [contact_service.ListContactsRequest], - Union[ - contact_service.ListContactsResponse, - Awaitable[contact_service.ListContactsResponse], - ], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ("ContactServiceTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/rest.py deleted file mode 100644 index 52a4f962b295..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/rest.py +++ /dev/null @@ -1,526 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 
2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import dataclasses -import json # type: ignore -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.protobuf import json_format -import grpc # type: ignore -from requests import __version__ as requests_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - - -from google.longrunning import operations_pb2 # type: ignore - -from google.ads.admanager_v1.types import contact_service - -from .base import ContactServiceTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class ContactServiceRestInterceptor: - """Interceptor for ContactService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. 
- Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the ContactServiceRestTransport. - - .. code-block:: python - class MyCustomContactServiceInterceptor(ContactServiceRestInterceptor): - def pre_get_contact(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_contact(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_contacts(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_contacts(self, response): - logging.log(f"Received response: {response}") - return response - - transport = ContactServiceRestTransport(interceptor=MyCustomContactServiceInterceptor()) - client = ContactServiceClient(transport=transport) - - - """ - - def pre_get_contact( - self, - request: contact_service.GetContactRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[contact_service.GetContactRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_contact - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContactService server. - """ - return request, metadata - - def post_get_contact( - self, response: contact_service.Contact - ) -> contact_service.Contact: - """Post-rpc interceptor for get_contact - - Override in a subclass to manipulate the response - after it is returned by the ContactService server but before - it is returned to user code. 
- """ - return response - - def pre_list_contacts( - self, - request: contact_service.ListContactsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[contact_service.ListContactsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_contacts - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContactService server. - """ - return request, metadata - - def post_list_contacts( - self, response: contact_service.ListContactsResponse - ) -> contact_service.ListContactsResponse: - """Post-rpc interceptor for list_contacts - - Override in a subclass to manipulate the response - after it is returned by the ContactService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, - request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContactService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the ContactService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class ContactServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: ContactServiceRestInterceptor - - -class ContactServiceRestTransport(ContactServiceTransport): - """REST backend transport for ContactService. - - Provides methods for handling Contact objects. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__( - self, - *, - host: str = "admanager.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[ContactServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'admanager.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or ContactServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _GetContact(ContactServiceRestStub): - def __hash__(self): - return hash("GetContact") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: contact_service.GetContactRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> contact_service.Contact: - r"""Call the get contact method over HTTP. - - Args: - request (~.contact_service.GetContactRequest): - The request object. Request object for GetContact method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.contact_service.Contact: - The Contact resource. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=networks/*/contacts/*}", - }, - ] - request, metadata = self._interceptor.pre_get_contact(request, metadata) - pb_request = contact_service.GetContactRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = contact_service.Contact() - pb_resp = contact_service.Contact.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_contact(resp) - return resp - - class _ListContacts(ContactServiceRestStub): - def __hash__(self): - return hash("ListContacts") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: contact_service.ListContactsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> contact_service.ListContactsResponse: - r"""Call the list contacts method over HTTP. - - Args: - request (~.contact_service.ListContactsRequest): - The request object. Request object for ListContacts - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.contact_service.ListContactsResponse: - Response object for - ListContactsRequest containing matching - Contact resources. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=networks/*}/contacts", - }, - ] - request, metadata = self._interceptor.pre_list_contacts(request, metadata) - pb_request = contact_service.ListContactsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = contact_service.ListContactsResponse() - pb_resp = contact_service.ListContactsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_contacts(resp) - return resp - - @property - def get_contact( - self, - ) -> Callable[[contact_service.GetContactRequest], contact_service.Contact]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetContact(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_contacts( - self, - ) -> Callable[ - [contact_service.ListContactsRequest], contact_service.ListContactsResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListContacts(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(ContactServiceRestStub): - def __call__( - self, - request: operations_pb2.GetOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", - }, - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__ = ("ContactServiceRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/rest.py deleted file mode 100644 index 33aa7085f6e5..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/rest.py +++ /dev/null @@ -1,527 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -import dataclasses -import json # type: ignore -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.protobuf import json_format -import grpc # type: ignore -from requests import __version__ as requests_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - - -from google.longrunning import operations_pb2 # type: ignore - -from google.ads.admanager_v1.types import creative_service - -from .base import CreativeServiceTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class CreativeServiceRestInterceptor: - """Interceptor for CreativeService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the CreativeServiceRestTransport. - - .. 
code-block:: python - class MyCustomCreativeServiceInterceptor(CreativeServiceRestInterceptor): - def pre_get_creative(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_creative(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_creatives(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_creatives(self, response): - logging.log(f"Received response: {response}") - return response - - transport = CreativeServiceRestTransport(interceptor=MyCustomCreativeServiceInterceptor()) - client = CreativeServiceClient(transport=transport) - - - """ - - def pre_get_creative( - self, - request: creative_service.GetCreativeRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[creative_service.GetCreativeRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_creative - - Override in a subclass to manipulate the request or metadata - before they are sent to the CreativeService server. - """ - return request, metadata - - def post_get_creative( - self, response: creative_service.Creative - ) -> creative_service.Creative: - """Post-rpc interceptor for get_creative - - Override in a subclass to manipulate the response - after it is returned by the CreativeService server but before - it is returned to user code. - """ - return response - - def pre_list_creatives( - self, - request: creative_service.ListCreativesRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[creative_service.ListCreativesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_creatives - - Override in a subclass to manipulate the request or metadata - before they are sent to the CreativeService server. 
- """ - return request, metadata - - def post_list_creatives( - self, response: creative_service.ListCreativesResponse - ) -> creative_service.ListCreativesResponse: - """Post-rpc interceptor for list_creatives - - Override in a subclass to manipulate the response - after it is returned by the CreativeService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, - request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the CreativeService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the CreativeService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class CreativeServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: CreativeServiceRestInterceptor - - -class CreativeServiceRestTransport(CreativeServiceTransport): - """REST backend transport for CreativeService. - - Provides methods for handling Creative objects. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__( - self, - *, - host: str = "admanager.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[CreativeServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'admanager.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or CreativeServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _GetCreative(CreativeServiceRestStub): - def __hash__(self): - return hash("GetCreative") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: creative_service.GetCreativeRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> creative_service.Creative: - r"""Call the get creative method over HTTP. - - Args: - request (~.creative_service.GetCreativeRequest): - The request object. Request object for GetCreative - method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.creative_service.Creative: - The Creative resource. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=networks/*/creatives/*}", - }, - ] - request, metadata = self._interceptor.pre_get_creative(request, metadata) - pb_request = creative_service.GetCreativeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = creative_service.Creative() - pb_resp = creative_service.Creative.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_creative(resp) - return resp - - class _ListCreatives(CreativeServiceRestStub): - def __hash__(self): - return hash("ListCreatives") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: creative_service.ListCreativesRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> creative_service.ListCreativesResponse: - r"""Call the list creatives method over HTTP. - - Args: - request (~.creative_service.ListCreativesRequest): - The request object. Request object for ListCreatives - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.creative_service.ListCreativesResponse: - Response object for - ListCreativesRequest containing matching - Creative resources. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=networks/*}/creatives", - }, - ] - request, metadata = self._interceptor.pre_list_creatives(request, metadata) - pb_request = creative_service.ListCreativesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = creative_service.ListCreativesResponse() - pb_resp = creative_service.ListCreativesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_creatives(resp) - return resp - - @property - def get_creative( - self, - ) -> Callable[[creative_service.GetCreativeRequest], creative_service.Creative]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetCreative(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_creatives( - self, - ) -> Callable[ - [creative_service.ListCreativesRequest], creative_service.ListCreativesResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListCreatives(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(CreativeServiceRestStub): - def __call__( - self, - request: operations_pb2.GetOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", - }, - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__ = ("CreativeServiceRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py index 4b1cd58b89f4..986a135d17c5 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py @@ -51,7 +51,11 @@ from google.longrunning import operations_pb2 # type: ignore from google.ads.admanager_v1.services.custom_field_service import pagers -from google.ads.admanager_v1.types import custom_field_enums, custom_field_service +from google.ads.admanager_v1.types import ( + custom_field_enums, + custom_field_messages, + custom_field_service, +) from .transports.base import DEFAULT_CLIENT_INFO, CustomFieldServiceTransport from .transports.rest import CustomFieldServiceRestTransport @@ -702,7 +706,7 @@ def get_custom_field( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_field_service.CustomField: + ) -> custom_field_messages.CustomField: r"""API to retrieve a ``CustomField`` object. .. code-block:: python @@ -749,7 +753,9 @@ def sample_get_custom_field(): Returns: google.ads.admanager_v1.types.CustomField: - The CustomField resource. + An additional, user-created field on + an entity. + """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/pagers.py index f0a4e63f1c52..b11c6be336cc 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/pagers.py @@ -38,7 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import custom_field_service +from google.ads.admanager_v1.types import custom_field_messages, custom_field_service class ListCustomFieldsPager: @@ -107,7 +107,7 @@ def pages(self) -> Iterator[custom_field_service.ListCustomFieldsResponse]: ) yield self._response - def __iter__(self) -> Iterator[custom_field_service.CustomField]: + def __iter__(self) -> Iterator[custom_field_messages.CustomField]: for page in self.pages: yield from page.custom_fields diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/base.py index 3578065cdf3f..97f76ac909c3 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import custom_field_service +from google.ads.admanager_v1.types import custom_field_messages, custom_field_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( 
gapic_version=package_version.__version__ @@ -156,8 +156,8 @@ def get_custom_field( ) -> Callable[ [custom_field_service.GetCustomFieldRequest], Union[ - custom_field_service.CustomField, - Awaitable[custom_field_service.CustomField], + custom_field_messages.CustomField, + Awaitable[custom_field_messages.CustomField], ], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py index 0da154858e92..4994a3e75121 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import custom_field_service +from google.ads.admanager_v1.types import custom_field_messages, custom_field_service from .base import CustomFieldServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -100,8 +100,8 @@ def pre_get_custom_field( return request, metadata def post_get_custom_field( - self, response: custom_field_service.CustomField - ) -> custom_field_service.CustomField: + self, response: custom_field_messages.CustomField + ) -> custom_field_messages.CustomField: """Post-rpc interceptor for get_custom_field Override in a subclass to manipulate the response @@ -274,7 +274,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_field_service.CustomField: + ) -> custom_field_messages.CustomField: r"""Call the get custom field method over HTTP. Args: @@ -287,8 +287,10 @@ def __call__( sent along with the request as metadata. 
Returns: - ~.custom_field_service.CustomField: - The ``CustomField`` resource. + ~.custom_field_messages.CustomField: + An additional, user-created field on + an entity. + """ http_options: List[Dict[str, str]] = [ @@ -333,8 +335,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = custom_field_service.CustomField() - pb_resp = custom_field_service.CustomField.pb(resp) + resp = custom_field_messages.CustomField() + pb_resp = custom_field_messages.CustomField.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_field(resp) @@ -433,7 +435,7 @@ def __call__( def get_custom_field( self, ) -> Callable[ - [custom_field_service.GetCustomFieldRequest], custom_field_service.CustomField + [custom_field_service.GetCustomFieldRequest], custom_field_messages.CustomField ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast @@ -481,11 +483,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py index 53ee2f5439d2..63992825ffe4 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py @@ -53,6 +53,7 @@ from google.ads.admanager_v1.services.custom_targeting_key_service import pagers from 
google.ads.admanager_v1.types import ( custom_targeting_key_enums, + custom_targeting_key_messages, custom_targeting_key_service, ) @@ -712,7 +713,7 @@ def get_custom_targeting_key( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_targeting_key_service.CustomTargetingKey: + ) -> custom_targeting_key_messages.CustomTargetingKey: r"""API to retrieve a ``CustomTargetingKey`` object. .. code-block:: python diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/pagers.py index e56ff58da48c..88953ea7950c 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/pagers.py @@ -38,7 +38,10 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import custom_targeting_key_service +from google.ads.admanager_v1.types import ( + custom_targeting_key_messages, + custom_targeting_key_service, +) class ListCustomTargetingKeysPager: @@ -113,7 +116,7 @@ def pages( ) yield self._response - def __iter__(self) -> Iterator[custom_targeting_key_service.CustomTargetingKey]: + def __iter__(self) -> Iterator[custom_targeting_key_messages.CustomTargetingKey]: for page in self.pages: yield from page.custom_targeting_keys diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/base.py index a55f7a97d634..7e4925dd049f 100644 --- 
a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/base.py @@ -26,7 +26,10 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import custom_targeting_key_service +from google.ads.admanager_v1.types import ( + custom_targeting_key_messages, + custom_targeting_key_service, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -156,8 +159,8 @@ def get_custom_targeting_key( ) -> Callable[ [custom_targeting_key_service.GetCustomTargetingKeyRequest], Union[ - custom_targeting_key_service.CustomTargetingKey, - Awaitable[custom_targeting_key_service.CustomTargetingKey], + custom_targeting_key_messages.CustomTargetingKey, + Awaitable[custom_targeting_key_messages.CustomTargetingKey], ], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py index 5ea81bb49e6c..6b9540dc0b60 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py @@ -38,7 +38,10 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import custom_targeting_key_service +from google.ads.admanager_v1.types import ( + custom_targeting_key_messages, + custom_targeting_key_service, +) from .base import CustomTargetingKeyServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -103,8 +106,8 @@ def 
pre_get_custom_targeting_key( return request, metadata def post_get_custom_targeting_key( - self, response: custom_targeting_key_service.CustomTargetingKey - ) -> custom_targeting_key_service.CustomTargetingKey: + self, response: custom_targeting_key_messages.CustomTargetingKey + ) -> custom_targeting_key_messages.CustomTargetingKey: """Post-rpc interceptor for get_custom_targeting_key Override in a subclass to manipulate the response @@ -280,7 +283,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_targeting_key_service.CustomTargetingKey: + ) -> custom_targeting_key_messages.CustomTargetingKey: r"""Call the get custom targeting key method over HTTP. Args: @@ -293,7 +296,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.custom_targeting_key_service.CustomTargetingKey: + ~.custom_targeting_key_messages.CustomTargetingKey: The ``CustomTargetingKey`` resource. """ @@ -341,8 +344,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = custom_targeting_key_service.CustomTargetingKey() - pb_resp = custom_targeting_key_service.CustomTargetingKey.pb(resp) + resp = custom_targeting_key_messages.CustomTargetingKey() + pb_resp = custom_targeting_key_messages.CustomTargetingKey.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_targeting_key(resp) @@ -447,7 +450,7 @@ def get_custom_targeting_key( self, ) -> Callable[ [custom_targeting_key_service.GetCustomTargetingKeyRequest], - custom_targeting_key_service.CustomTargetingKey, + custom_targeting_key_messages.CustomTargetingKey, ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast @@ -495,11 +498,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py index 2a1a0435b1c9..6c03f1fb4c53 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py @@ -53,6 +53,7 @@ from google.ads.admanager_v1.services.custom_targeting_value_service import pagers from google.ads.admanager_v1.types import ( custom_targeting_value_enums, + custom_targeting_value_messages, custom_targeting_value_service, ) @@ -723,7 +724,7 @@ def get_custom_targeting_value( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_targeting_value_service.CustomTargetingValue: + ) -> custom_targeting_value_messages.CustomTargetingValue: r"""API to retrieve a ``CustomTargetingValue`` object. .. 
code-block:: python diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/pagers.py index 09ef836cdb72..214d53becdec 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/pagers.py @@ -38,7 +38,10 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import custom_targeting_value_service +from google.ads.admanager_v1.types import ( + custom_targeting_value_messages, + custom_targeting_value_service, +) class ListCustomTargetingValuesPager: @@ -113,7 +116,9 @@ def pages( ) yield self._response - def __iter__(self) -> Iterator[custom_targeting_value_service.CustomTargetingValue]: + def __iter__( + self, + ) -> Iterator[custom_targeting_value_messages.CustomTargetingValue]: for page in self.pages: yield from page.custom_targeting_values diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/base.py index a9d002f465f7..0ed99f654001 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/base.py @@ -26,7 +26,10 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import custom_targeting_value_service +from google.ads.admanager_v1.types import ( + 
custom_targeting_value_messages, + custom_targeting_value_service, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -156,8 +159,8 @@ def get_custom_targeting_value( ) -> Callable[ [custom_targeting_value_service.GetCustomTargetingValueRequest], Union[ - custom_targeting_value_service.CustomTargetingValue, - Awaitable[custom_targeting_value_service.CustomTargetingValue], + custom_targeting_value_messages.CustomTargetingValue, + Awaitable[custom_targeting_value_messages.CustomTargetingValue], ], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py index 1154a1bebe68..4706f5043211 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py @@ -38,7 +38,10 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import custom_targeting_value_service +from google.ads.admanager_v1.types import ( + custom_targeting_value_messages, + custom_targeting_value_service, +) from .base import CustomTargetingValueServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -103,8 +106,8 @@ def pre_get_custom_targeting_value( return request, metadata def post_get_custom_targeting_value( - self, response: custom_targeting_value_service.CustomTargetingValue - ) -> custom_targeting_value_service.CustomTargetingValue: + self, response: custom_targeting_value_messages.CustomTargetingValue + ) -> custom_targeting_value_messages.CustomTargetingValue: """Post-rpc interceptor for get_custom_targeting_value Override in a subclass to manipulate the response @@ -280,7 +283,7 @@ 
def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_targeting_value_service.CustomTargetingValue: + ) -> custom_targeting_value_messages.CustomTargetingValue: r"""Call the get custom targeting value method over HTTP. @@ -294,7 +297,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.custom_targeting_value_service.CustomTargetingValue: + ~.custom_targeting_value_messages.CustomTargetingValue: The ``CustomTargetingValue`` resource. """ @@ -344,8 +347,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = custom_targeting_value_service.CustomTargetingValue() - pb_resp = custom_targeting_value_service.CustomTargetingValue.pb(resp) + resp = custom_targeting_value_messages.CustomTargetingValue() + pb_resp = custom_targeting_value_messages.CustomTargetingValue.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_targeting_value(resp) @@ -454,7 +457,7 @@ def get_custom_targeting_value( self, ) -> Callable[ [custom_targeting_value_service.GetCustomTargetingValueRequest], - custom_targeting_value_service.CustomTargetingValue, + custom_targeting_value_messages.CustomTargetingValue, ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast @@ -502,11 +505,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/__init__.py similarity index 85% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/__init__.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/__init__.py index 65fa5abb358e..3b03f6d3f9dc 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/__init__.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/__init__.py @@ -13,6 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .client import CreativeServiceClient +from .client import EntitySignalsMappingServiceClient -__all__ = ("CreativeServiceClient",) +__all__ = ("EntitySignalsMappingServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py similarity index 53% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/client.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py index c04fdc539730..fe94c89d85d2 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py @@ -49,17 +49,20 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore -from google.ads.admanager_v1.services.creative_service import pagers -from google.ads.admanager_v1.types import ad_partner_declaration, creative_service +from google.ads.admanager_v1.services.entity_signals_mapping_service import pagers +from google.ads.admanager_v1.types import ( + entity_signals_mapping_messages, + entity_signals_mapping_service, +) -from .transports.base import DEFAULT_CLIENT_INFO, CreativeServiceTransport -from .transports.rest import CreativeServiceRestTransport +from .transports.base import DEFAULT_CLIENT_INFO, EntitySignalsMappingServiceTransport +from .transports.rest import EntitySignalsMappingServiceRestTransport -class CreativeServiceClientMeta(type): - """Metaclass for the CreativeService client. +class EntitySignalsMappingServiceClientMeta(type): + """Metaclass for the EntitySignalsMappingService client. 
This provides class-level methods for building and retrieving support objects (e.g. transport) without polluting the client instance @@ -68,13 +71,13 @@ class CreativeServiceClientMeta(type): _transport_registry = ( OrderedDict() - ) # type: Dict[str, Type[CreativeServiceTransport]] - _transport_registry["rest"] = CreativeServiceRestTransport + ) # type: Dict[str, Type[EntitySignalsMappingServiceTransport]] + _transport_registry["rest"] = EntitySignalsMappingServiceRestTransport def get_transport_class( cls, label: Optional[str] = None, - ) -> Type[CreativeServiceTransport]: + ) -> Type[EntitySignalsMappingServiceTransport]: """Returns an appropriate transport class. Args: @@ -93,8 +96,10 @@ def get_transport_class( return next(iter(cls._transport_registry.values())) -class CreativeServiceClient(metaclass=CreativeServiceClientMeta): - """Provides methods for handling Creative objects.""" +class EntitySignalsMappingServiceClient( + metaclass=EntitySignalsMappingServiceClientMeta +): + """Provides methods for handling ``EntitySignalsMapping`` objects.""" @staticmethod def _get_default_mtls_endpoint(api_endpoint): @@ -146,7 +151,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - CreativeServiceClient: The constructed client. + EntitySignalsMappingServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_info(info) kwargs["credentials"] = credentials @@ -164,7 +169,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - CreativeServiceClient: The constructed client. + EntitySignalsMappingServiceClient: The constructed client. 
""" credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -173,69 +178,32 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @property - def transport(self) -> CreativeServiceTransport: + def transport(self) -> EntitySignalsMappingServiceTransport: """Returns the transport used by the client instance. Returns: - CreativeServiceTransport: The transport used by the client + EntitySignalsMappingServiceTransport: The transport used by the client instance. """ return self._transport @staticmethod - def ad_partner_path( - network_code: str, - ad_partner: str, - ) -> str: - """Returns a fully-qualified ad_partner string.""" - return "networks/{network_code}/adPartners/{ad_partner}".format( - network_code=network_code, - ad_partner=ad_partner, - ) - - @staticmethod - def parse_ad_partner_path(path: str) -> Dict[str, str]: - """Parses a ad_partner path into its component segments.""" - m = re.match( - r"^networks/(?P.+?)/adPartners/(?P.+?)$", path - ) - return m.groupdict() if m else {} - - @staticmethod - def company_path( - network_code: str, - company: str, - ) -> str: - """Returns a fully-qualified company string.""" - return "networks/{network_code}/companies/{company}".format( - network_code=network_code, - company=company, - ) - - @staticmethod - def parse_company_path(path: str) -> Dict[str, str]: - """Parses a company path into its component segments.""" - m = re.match( - r"^networks/(?P.+?)/companies/(?P.+?)$", path - ) - return m.groupdict() if m else {} - - @staticmethod - def creative_path( + def entity_signals_mapping_path( network_code: str, - creative: str, + entity_signals_mapping: str, ) -> str: - """Returns a fully-qualified creative string.""" - return "networks/{network_code}/creatives/{creative}".format( + """Returns a fully-qualified entity_signals_mapping string.""" + return 
"networks/{network_code}/entitySignalsMappings/{entity_signals_mapping}".format( network_code=network_code, - creative=creative, + entity_signals_mapping=entity_signals_mapping, ) @staticmethod - def parse_creative_path(path: str) -> Dict[str, str]: - """Parses a creative path into its component segments.""" + def parse_entity_signals_mapping_path(path: str) -> Dict[str, str]: + """Parses a entity_signals_mapping path into its component segments.""" m = re.match( - r"^networks/(?P.+?)/creatives/(?P.+?)$", path + r"^networks/(?P.+?)/entitySignalsMappings/(?P.+?)$", + path, ) return m.groupdict() if m else {} @@ -473,15 +441,17 @@ def _get_api_endpoint( elif use_mtls_endpoint == "always" or ( use_mtls_endpoint == "auto" and client_cert_source ): - _default_universe = CreativeServiceClient._DEFAULT_UNIVERSE + _default_universe = EntitySignalsMappingServiceClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: raise MutualTLSChannelError( f"mTLS is not supported in any universe other than {_default_universe}." ) - api_endpoint = CreativeServiceClient.DEFAULT_MTLS_ENDPOINT + api_endpoint = EntitySignalsMappingServiceClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = CreativeServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=universe_domain + api_endpoint = ( + EntitySignalsMappingServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) ) return api_endpoint @@ -501,7 +471,7 @@ def _get_universe_domain( Raises: ValueError: If the universe domain is an empty string. """ - universe_domain = CreativeServiceClient._DEFAULT_UNIVERSE + universe_domain = EntitySignalsMappingServiceClient._DEFAULT_UNIVERSE if client_universe_domain is not None: universe_domain = client_universe_domain elif universe_domain_env is not None: @@ -527,7 +497,7 @@ def _compare_universes( ValueError: when client_universe does not match the universe in credentials. 
""" - default_universe = CreativeServiceClient._DEFAULT_UNIVERSE + default_universe = EntitySignalsMappingServiceClient._DEFAULT_UNIVERSE credentials_universe = getattr(credentials, "universe_domain", default_universe) if client_universe != credentials_universe: @@ -551,7 +521,7 @@ def _validate_universe_domain(self): """ self._is_universe_domain_valid = ( self._is_universe_domain_valid - or CreativeServiceClient._compare_universes( + or EntitySignalsMappingServiceClient._compare_universes( self.universe_domain, self.transport._credentials ) ) @@ -581,13 +551,15 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[ Union[ - str, CreativeServiceTransport, Callable[..., CreativeServiceTransport] + str, + EntitySignalsMappingServiceTransport, + Callable[..., EntitySignalsMappingServiceTransport], ] ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the creative service client. + """Instantiates the entity signals mapping service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -595,10 +567,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Optional[Union[str,CreativeServiceTransport,Callable[..., CreativeServiceTransport]]]): + transport (Optional[Union[str,EntitySignalsMappingServiceTransport,Callable[..., EntitySignalsMappingServiceTransport]]]): The transport to use, or a Callable that constructs and returns a new transport. If a Callable is given, it will be called with the same set of initialization - arguments as used in the CreativeServiceTransport constructor. + arguments as used in the EntitySignalsMappingServiceTransport constructor. If set to None, a transport is chosen automatically. 
client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -651,11 +623,13 @@ def __init__( self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env, - ) = CreativeServiceClient._read_environment_variables() - self._client_cert_source = CreativeServiceClient._get_client_cert_source( - self._client_options.client_cert_source, self._use_client_cert + ) = EntitySignalsMappingServiceClient._read_environment_variables() + self._client_cert_source = ( + EntitySignalsMappingServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) ) - self._universe_domain = CreativeServiceClient._get_universe_domain( + self._universe_domain = EntitySignalsMappingServiceClient._get_universe_domain( universe_domain_opt, self._universe_domain_env ) self._api_endpoint = None # updated below, depending on `transport` @@ -672,9 +646,9 @@ def __init__( # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, CreativeServiceTransport) + transport_provided = isinstance(transport, EntitySignalsMappingServiceTransport) if transport_provided: - # transport is a CreativeServiceTransport instance. + # transport is a EntitySignalsMappingServiceTransport instance. if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " @@ -685,12 +659,12 @@ def __init__( "When providing a transport instance, provide its scopes " "directly." 
) - self._transport = cast(CreativeServiceTransport, transport) + self._transport = cast(EntitySignalsMappingServiceTransport, transport) self._api_endpoint = self._transport.host self._api_endpoint = ( self._api_endpoint - or CreativeServiceClient._get_api_endpoint( + or EntitySignalsMappingServiceClient._get_api_endpoint( self._client_options.api_endpoint, self._client_cert_source, self._universe_domain, @@ -709,11 +683,14 @@ def __init__( ) transport_init: Union[ - Type[CreativeServiceTransport], Callable[..., CreativeServiceTransport] + Type[EntitySignalsMappingServiceTransport], + Callable[..., EntitySignalsMappingServiceTransport], ] = ( - CreativeServiceClient.get_transport_class(transport) + EntitySignalsMappingServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None - else cast(Callable[..., CreativeServiceTransport], transport) + else cast( + Callable[..., EntitySignalsMappingServiceTransport], transport + ) ) # initialize with the provided callable or the passed in class self._transport = transport_init( @@ -728,16 +705,18 @@ def __init__( api_audience=self._client_options.api_audience, ) - def get_creative( + def get_entity_signals_mapping( self, - request: Optional[Union[creative_service.GetCreativeRequest, dict]] = None, + request: Optional[ + Union[entity_signals_mapping_service.GetEntitySignalsMappingRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> creative_service.Creative: - r"""API to retrieve a Creative object. + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""API to retrieve a ``EntitySignalsMapping`` object. .. 
code-block:: python @@ -750,28 +729,28 @@ def get_creative( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.ads import admanager_v1 - def sample_get_creative(): + def sample_get_entity_signals_mapping(): # Create a client - client = admanager_v1.CreativeServiceClient() + client = admanager_v1.EntitySignalsMappingServiceClient() # Initialize request argument(s) - request = admanager_v1.GetCreativeRequest( + request = admanager_v1.GetEntitySignalsMappingRequest( name="name_value", ) # Make the request - response = client.get_creative(request=request) + response = client.get_entity_signals_mapping(request=request) # Handle the response print(response) Args: - request (Union[google.ads.admanager_v1.types.GetCreativeRequest, dict]): - The request object. Request object for GetCreative - method. + request (Union[google.ads.admanager_v1.types.GetEntitySignalsMappingRequest, dict]): + The request object. Request object for ``GetEntitySignalsMapping`` method. name (str): - Required. The resource name of the Creative. Format: - ``networks/{network_code}/creatives/{creative_id}`` + Required. The resource name of the EntitySignalsMapping. + Format: + ``networks/{network_code}/entitySignalsMappings/{entity_signals_mapping_id}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -783,8 +762,8 @@ def sample_get_creative(): sent along with the request as metadata. Returns: - google.ads.admanager_v1.types.Creative: - The Creative resource. + google.ads.admanager_v1.types.EntitySignalsMapping: + The EntitySignalsMapping resource. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -798,8 +777,12 @@ def sample_get_creative(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, creative_service.GetCreativeRequest): - request = creative_service.GetCreativeRequest(request) + if not isinstance( + request, entity_signals_mapping_service.GetEntitySignalsMappingRequest + ): + request = entity_signals_mapping_service.GetEntitySignalsMappingRequest( + request + ) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -807,7 +790,9 @@ def sample_get_creative(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_creative] + rpc = self._transport._wrapped_methods[ + self._transport.get_entity_signals_mapping + ] # Certain fields should be provided within the metadata header; # add these here. @@ -829,16 +814,18 @@ def sample_get_creative(): # Done; return the response. return response - def list_creatives( + def list_entity_signals_mappings( self, - request: Optional[Union[creative_service.ListCreativesRequest, dict]] = None, + request: Optional[ + Union[entity_signals_mapping_service.ListEntitySignalsMappingsRequest, dict] + ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListCreativesPager: - r"""API to retrieve a list of Creative objects. + ) -> pagers.ListEntitySignalsMappingsPager: + r"""API to retrieve a list of ``EntitySignalsMapping`` objects. .. 
code-block:: python @@ -851,29 +838,29 @@ def list_creatives( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.ads import admanager_v1 - def sample_list_creatives(): + def sample_list_entity_signals_mappings(): # Create a client - client = admanager_v1.CreativeServiceClient() + client = admanager_v1.EntitySignalsMappingServiceClient() # Initialize request argument(s) - request = admanager_v1.ListCreativesRequest( + request = admanager_v1.ListEntitySignalsMappingsRequest( parent="parent_value", ) # Make the request - page_result = client.list_creatives(request=request) + page_result = client.list_entity_signals_mappings(request=request) # Handle the response for response in page_result: print(response) Args: - request (Union[google.ads.admanager_v1.types.ListCreativesRequest, dict]): - The request object. Request object for ListCreatives - method. + request (Union[google.ads.admanager_v1.types.ListEntitySignalsMappingsRequest, dict]): + The request object. Request object for ``ListEntitySignalsMappings`` method. parent (str): Required. The parent, which owns this collection of - Creatives. Format: networks/{network_code} + EntitySignalsMappings. Format: + ``networks/{network_code}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -885,14 +872,12 @@ def sample_list_creatives(): sent along with the request as metadata. Returns: - google.ads.admanager_v1.services.creative_service.pagers.ListCreativesPager: - Response object for - ListCreativesRequest containing matching - Creative resources. + google.ads.admanager_v1.services.entity_signals_mapping_service.pagers.ListEntitySignalsMappingsPager: + Response object for ListEntitySignalsMappingsRequest containing matching + EntitySignalsMapping resources. - Iterating over this object will yield - results and resolve additional pages - automatically. 
+ Iterating over this object will yield results and + resolve additional pages automatically. """ # Create or coerce a protobuf request object. @@ -907,8 +892,12 @@ def sample_list_creatives(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, creative_service.ListCreativesRequest): - request = creative_service.ListCreativesRequest(request) + if not isinstance( + request, entity_signals_mapping_service.ListEntitySignalsMappingsRequest + ): + request = entity_signals_mapping_service.ListEntitySignalsMappingsRequest( + request + ) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: @@ -916,7 +905,9 @@ def sample_list_creatives(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_creatives] + rpc = self._transport._wrapped_methods[ + self._transport.list_entity_signals_mappings + ] # Certain fields should be provided within the metadata header; # add these here. @@ -937,7 +928,7 @@ def sample_list_creatives(): # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers.ListCreativesPager( + response = pagers.ListEntitySignalsMappingsPager( method=rpc, request=request, response=response, @@ -949,7 +940,547 @@ def sample_list_creatives(): # Done; return the response. 
return response - def __enter__(self) -> "CreativeServiceClient": + def create_entity_signals_mapping( + self, + request: Optional[ + Union[ + entity_signals_mapping_service.CreateEntitySignalsMappingRequest, dict + ] + ] = None, + *, + parent: Optional[str] = None, + entity_signals_mapping: Optional[ + entity_signals_mapping_messages.EntitySignalsMapping + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""API to create an ``EntitySignalsMapping`` object. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_create_entity_signals_mapping(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + entity_signals_mapping = admanager_v1.EntitySignalsMapping() + entity_signals_mapping.audience_segment_id = 1980 + entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.CreateEntitySignalsMappingRequest( + parent="parent_value", + entity_signals_mapping=entity_signals_mapping, + ) + + # Make the request + response = client.create_entity_signals_mapping(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.CreateEntitySignalsMappingRequest, dict]): + The request object. Request object for + 'CreateEntitySignalsMapping' method. + parent (str): + Required. The parent resource where this + EntitySignalsMapping will be created. 
Format: + ``networks/{network_code}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entity_signals_mapping (google.ads.admanager_v1.types.EntitySignalsMapping): + Required. The EntitySignalsMapping + object to create. + + This corresponds to the ``entity_signals_mapping`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.EntitySignalsMapping: + The EntitySignalsMapping resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entity_signals_mapping]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, entity_signals_mapping_service.CreateEntitySignalsMappingRequest + ): + request = entity_signals_mapping_service.CreateEntitySignalsMappingRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entity_signals_mapping is not None: + request.entity_signals_mapping = entity_signals_mapping + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.create_entity_signals_mapping + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_entity_signals_mapping( + self, + request: Optional[ + Union[ + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, dict + ] + ] = None, + *, + entity_signals_mapping: Optional[ + entity_signals_mapping_messages.EntitySignalsMapping + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""API to update an ``EntitySignalsMapping`` object. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_update_entity_signals_mapping(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + entity_signals_mapping = admanager_v1.EntitySignalsMapping() + entity_signals_mapping.audience_segment_id = 1980 + entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.UpdateEntitySignalsMappingRequest( + entity_signals_mapping=entity_signals_mapping, + ) + + # Make the request + response = client.update_entity_signals_mapping(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.UpdateEntitySignalsMappingRequest, dict]): + The request object. Request object for + 'UpdateEntitySignalsMapping' method. + entity_signals_mapping (google.ads.admanager_v1.types.EntitySignalsMapping): + Required. The ``EntitySignalsMapping`` to update. + + The EntitySignalsMapping's name is used to identify the + EntitySignalsMapping to update. Format: + ``networks/{network_code}/entitySignalsMappings/{entity_signals_mapping}`` + + This corresponds to the ``entity_signals_mapping`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ads.admanager_v1.types.EntitySignalsMapping: + The EntitySignalsMapping resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([entity_signals_mapping, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, entity_signals_mapping_service.UpdateEntitySignalsMappingRequest + ): + request = entity_signals_mapping_service.UpdateEntitySignalsMappingRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entity_signals_mapping is not None: + request.entity_signals_mapping = entity_signals_mapping + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_entity_signals_mapping + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("entity_signals_mapping.name", request.entity_signals_mapping.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def batch_create_entity_signals_mappings( + self, + request: Optional[ + Union[ + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + requests: Optional[ + MutableSequence[ + entity_signals_mapping_service.CreateEntitySignalsMappingRequest + ] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse: + r"""API to batch create ``EntitySignalsMapping`` objects. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_batch_create_entity_signals_mappings(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + requests = admanager_v1.CreateEntitySignalsMappingRequest() + requests.parent = "parent_value" + requests.entity_signals_mapping.audience_segment_id = 1980 + requests.entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.BatchCreateEntitySignalsMappingsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.batch_create_entity_signals_mappings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.BatchCreateEntitySignalsMappingsRequest, dict]): + The request object. Request object for ``BatchCreateEntitySignalsMappings`` + method. + parent (str): + Required. 
The parent resource where + ``EntitySignalsMappings`` will be created. Format: + ``networks/{network_code}`` The parent field in the + CreateEntitySignalsMappingRequest must match this field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (MutableSequence[google.ads.admanager_v1.types.CreateEntitySignalsMappingRequest]): + Required. The ``EntitySignalsMapping`` objects to + create. A maximum of 100 objects can be created in a + batch. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.BatchCreateEntitySignalsMappingsResponse: + Response object for BatchCreateEntitySignalsMappings + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, requests]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + ): + request = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if requests is not None: + request.requests = requests + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_create_entity_signals_mappings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_update_entity_signals_mappings( + self, + request: Optional[ + Union[ + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + requests: Optional[ + MutableSequence[ + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest + ] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse: + r"""API to batch update ``EntitySignalsMapping`` objects. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_batch_update_entity_signals_mappings(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + requests = admanager_v1.UpdateEntitySignalsMappingRequest() + requests.entity_signals_mapping.audience_segment_id = 1980 + requests.entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.BatchUpdateEntitySignalsMappingsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.batch_update_entity_signals_mappings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.BatchUpdateEntitySignalsMappingsRequest, dict]): + The request object. Request object for ``BatchUpdateEntitySignalsMappings`` + method. + parent (str): + Required. The parent resource where + ``EntitySignalsMappings`` will be updated. Format: + ``networks/{network_code}`` The parent field in the + UpdateEntitySignalsMappingRequest must match this field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (MutableSequence[google.ads.admanager_v1.types.UpdateEntitySignalsMappingRequest]): + Required. The ``EntitySignalsMapping`` objects to + update. A maximum of 100 objects can be updated in a + batch. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ads.admanager_v1.types.BatchUpdateEntitySignalsMappingsResponse: + Response object for BatchUpdateEntitySignalsMappings + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, requests]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + ): + request = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if requests is not None: + request.requests = requests + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_update_entity_signals_mappings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "EntitySignalsMappingServiceClient": return self def __exit__(self, type, value, traceback): @@ -1025,4 +1556,4 @@ def get_operation( ) -__all__ = ("CreativeServiceClient",) +__all__ = ("EntitySignalsMappingServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/pagers.py similarity index 69% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/pagers.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/pagers.py index fda9c7f48045..464c0fe8d515 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/pagers.py @@ -38,32 +38,37 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import creative_service +from google.ads.admanager_v1.types import ( + entity_signals_mapping_messages, + entity_signals_mapping_service, +) -class ListCreativesPager: - """A pager for iterating through ``list_creatives`` requests. +class ListEntitySignalsMappingsPager: + """A pager for iterating through ``list_entity_signals_mappings`` requests. This class thinly wraps an initial - :class:`google.ads.admanager_v1.types.ListCreativesResponse` object, and + :class:`google.ads.admanager_v1.types.ListEntitySignalsMappingsResponse` object, and provides an ``__iter__`` method to iterate through its - ``creatives`` field. + ``entity_signals_mappings`` field. 
If there are more pages, the ``__iter__`` method will make additional - ``ListCreatives`` requests and continue to iterate - through the ``creatives`` field on the + ``ListEntitySignalsMappings`` requests and continue to iterate + through the ``entity_signals_mappings`` field on the corresponding responses. - All the usual :class:`google.ads.admanager_v1.types.ListCreativesResponse` + All the usual :class:`google.ads.admanager_v1.types.ListEntitySignalsMappingsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, - method: Callable[..., creative_service.ListCreativesResponse], - request: creative_service.ListCreativesRequest, - response: creative_service.ListCreativesResponse, + method: Callable[ + ..., entity_signals_mapping_service.ListEntitySignalsMappingsResponse + ], + request: entity_signals_mapping_service.ListEntitySignalsMappingsRequest, + response: entity_signals_mapping_service.ListEntitySignalsMappingsResponse, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -74,9 +79,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (google.ads.admanager_v1.types.ListCreativesRequest): + request (google.ads.admanager_v1.types.ListEntitySignalsMappingsRequest): The initial request object. - response (google.ads.admanager_v1.types.ListCreativesResponse): + response (google.ads.admanager_v1.types.ListEntitySignalsMappingsResponse): The initial response object. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -85,7 +90,9 @@ def __init__( sent along with the request as metadata. 
""" self._method = method - self._request = creative_service.ListCreativesRequest(request) + self._request = entity_signals_mapping_service.ListEntitySignalsMappingsRequest( + request + ) self._response = response self._retry = retry self._timeout = timeout @@ -95,7 +102,9 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterator[creative_service.ListCreativesResponse]: + def pages( + self, + ) -> Iterator[entity_signals_mapping_service.ListEntitySignalsMappingsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token @@ -107,9 +116,11 @@ def pages(self) -> Iterator[creative_service.ListCreativesResponse]: ) yield self._response - def __iter__(self) -> Iterator[creative_service.Creative]: + def __iter__( + self, + ) -> Iterator[entity_signals_mapping_messages.EntitySignalsMapping]: for page in self.pages: - yield from page.creatives + yield from page.entity_signals_mappings def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/transports/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/__init__.py similarity index 60% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/transports/__init__.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/__init__.py index 0cdd254a8628..a842b7667625 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/transports/__init__.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/__init__.py @@ -16,15 +16,20 @@ from collections import OrderedDict from typing import Dict, Type -from .base import TeamServiceTransport 
-from .rest import TeamServiceRestInterceptor, TeamServiceRestTransport +from .base import EntitySignalsMappingServiceTransport +from .rest import ( + EntitySignalsMappingServiceRestInterceptor, + EntitySignalsMappingServiceRestTransport, +) # Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[TeamServiceTransport]] -_transport_registry["rest"] = TeamServiceRestTransport +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[EntitySignalsMappingServiceTransport]] +_transport_registry["rest"] = EntitySignalsMappingServiceRestTransport __all__ = ( - "TeamServiceTransport", - "TeamServiceRestTransport", - "TeamServiceRestInterceptor", + "EntitySignalsMappingServiceTransport", + "EntitySignalsMappingServiceRestTransport", + "EntitySignalsMappingServiceRestInterceptor", ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/base.py similarity index 63% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/base.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/base.py index 40679501a417..cc29ed2e8641 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/base.py @@ -26,15 +26,18 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import ad_partner_service +from google.ads.admanager_v1.types import ( + entity_signals_mapping_messages, + entity_signals_mapping_service, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( 
gapic_version=package_version.__version__ ) -class AdPartnerServiceTransport(abc.ABC): - """Abstract transport class for AdPartnerService.""" +class EntitySignalsMappingServiceTransport(abc.ABC): + """Abstract transport class for EntitySignalsMappingService.""" AUTH_SCOPES = () @@ -129,13 +132,33 @@ def host(self): def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { - self.get_ad_partner: gapic_v1.method.wrap_method( - self.get_ad_partner, + self.get_entity_signals_mapping: gapic_v1.method.wrap_method( + self.get_entity_signals_mapping, + default_timeout=None, + client_info=client_info, + ), + self.list_entity_signals_mappings: gapic_v1.method.wrap_method( + self.list_entity_signals_mappings, + default_timeout=None, + client_info=client_info, + ), + self.create_entity_signals_mapping: gapic_v1.method.wrap_method( + self.create_entity_signals_mapping, + default_timeout=None, + client_info=client_info, + ), + self.update_entity_signals_mapping: gapic_v1.method.wrap_method( + self.update_entity_signals_mapping, + default_timeout=None, + client_info=client_info, + ), + self.batch_create_entity_signals_mappings: gapic_v1.method.wrap_method( + self.batch_create_entity_signals_mappings, default_timeout=None, client_info=client_info, ), - self.list_ad_partners: gapic_v1.method.wrap_method( - self.list_ad_partners, + self.batch_update_entity_signals_mappings: gapic_v1.method.wrap_method( + self.batch_update_entity_signals_mappings, default_timeout=None, client_info=client_info, ), @@ -151,22 +174,77 @@ def close(self): raise NotImplementedError() @property - def get_ad_partner( + def get_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.GetEntitySignalsMappingRequest], + Union[ + entity_signals_mapping_messages.EntitySignalsMapping, + Awaitable[entity_signals_mapping_messages.EntitySignalsMapping], + ], + ]: + raise NotImplementedError() + + @property + def list_entity_signals_mappings( 
self, ) -> Callable[ - [ad_partner_service.GetAdPartnerRequest], - Union[ad_partner_service.AdPartner, Awaitable[ad_partner_service.AdPartner]], + [entity_signals_mapping_service.ListEntitySignalsMappingsRequest], + Union[ + entity_signals_mapping_service.ListEntitySignalsMappingsResponse, + Awaitable[entity_signals_mapping_service.ListEntitySignalsMappingsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.CreateEntitySignalsMappingRequest], + Union[ + entity_signals_mapping_messages.EntitySignalsMapping, + Awaitable[entity_signals_mapping_messages.EntitySignalsMapping], + ], + ]: + raise NotImplementedError() + + @property + def update_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.UpdateEntitySignalsMappingRequest], + Union[ + entity_signals_mapping_messages.EntitySignalsMapping, + Awaitable[entity_signals_mapping_messages.EntitySignalsMapping], + ], + ]: + raise NotImplementedError() + + @property + def batch_create_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest], + Union[ + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse, + Awaitable[ + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse + ], + ], ]: raise NotImplementedError() @property - def list_ad_partners( + def batch_update_entity_signals_mappings( self, ) -> Callable[ - [ad_partner_service.ListAdPartnersRequest], + [entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest], Union[ - ad_partner_service.ListAdPartnersResponse, - Awaitable[ad_partner_service.ListAdPartnersResponse], + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse, + Awaitable[ + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse + ], ], ]: raise NotImplementedError() @@ -185,4 +263,4 @@ def kind(self) -> str: raise 
NotImplementedError() -__all__ = ("AdPartnerServiceTransport",) +__all__ = ("EntitySignalsMappingServiceTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/rest.py new file mode 100644 index 000000000000..a6e1199ef6ef --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/rest.py @@ -0,0 +1,1153 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.ads.admanager_v1.types import ( + entity_signals_mapping_messages, + entity_signals_mapping_service, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import EntitySignalsMappingServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class EntitySignalsMappingServiceRestInterceptor: + """Interceptor for EntitySignalsMappingService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the EntitySignalsMappingServiceRestTransport. + + .. 
code-block:: python + class MyCustomEntitySignalsMappingServiceInterceptor(EntitySignalsMappingServiceRestInterceptor): + def pre_batch_create_entity_signals_mappings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_create_entity_signals_mappings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_batch_update_entity_signals_mappings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_update_entity_signals_mappings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_entity_signals_mapping(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_entity_signals_mapping(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_entity_signals_mapping(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_entity_signals_mapping(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_entity_signals_mappings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_entity_signals_mappings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_entity_signals_mapping(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_entity_signals_mapping(self, response): + logging.log(f"Received response: {response}") + return response + + transport = EntitySignalsMappingServiceRestTransport(interceptor=MyCustomEntitySignalsMappingServiceInterceptor()) + client = EntitySignalsMappingServiceClient(transport=transport) + + + """ + + def pre_batch_create_entity_signals_mappings( + self, + 
request: entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for batch_create_entity_signals_mappings + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_batch_create_entity_signals_mappings( + self, + response: entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse, + ) -> entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse: + """Post-rpc interceptor for batch_create_entity_signals_mappings + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. + """ + return response + + def pre_batch_update_entity_signals_mappings( + self, + request: entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for batch_update_entity_signals_mappings + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_batch_update_entity_signals_mappings( + self, + response: entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse, + ) -> entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse: + """Post-rpc interceptor for batch_update_entity_signals_mappings + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. 
+ """ + return response + + def pre_create_entity_signals_mapping( + self, + request: entity_signals_mapping_service.CreateEntitySignalsMappingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.CreateEntitySignalsMappingRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for create_entity_signals_mapping + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_create_entity_signals_mapping( + self, response: entity_signals_mapping_messages.EntitySignalsMapping + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + """Post-rpc interceptor for create_entity_signals_mapping + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. + """ + return response + + def pre_get_entity_signals_mapping( + self, + request: entity_signals_mapping_service.GetEntitySignalsMappingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.GetEntitySignalsMappingRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for get_entity_signals_mapping + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_get_entity_signals_mapping( + self, response: entity_signals_mapping_messages.EntitySignalsMapping + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + """Post-rpc interceptor for get_entity_signals_mapping + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_entity_signals_mappings( + self, + request: entity_signals_mapping_service.ListEntitySignalsMappingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.ListEntitySignalsMappingsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_entity_signals_mappings + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_list_entity_signals_mappings( + self, response: entity_signals_mapping_service.ListEntitySignalsMappingsResponse + ) -> entity_signals_mapping_service.ListEntitySignalsMappingsResponse: + """Post-rpc interceptor for list_entity_signals_mappings + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. + """ + return response + + def pre_update_entity_signals_mapping( + self, + request: entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for update_entity_signals_mapping + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_update_entity_signals_mapping( + self, response: entity_signals_mapping_messages.EntitySignalsMapping + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + """Post-rpc interceptor for update_entity_signals_mapping + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class EntitySignalsMappingServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: EntitySignalsMappingServiceRestInterceptor + + +class EntitySignalsMappingServiceRestTransport(EntitySignalsMappingServiceTransport): + """REST backend transport for EntitySignalsMappingService. + + Provides methods for handling ``EntitySignalsMapping`` objects. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "admanager.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[EntitySignalsMappingServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'admanager.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or EntitySignalsMappingServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _BatchCreateEntitySignalsMappings(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("BatchCreateEntitySignalsMappings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse: + r"""Call the batch 
create entity + signals mappings method over HTTP. + + Args: + request (~.entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest): + The request object. Request object for ``BatchCreateEntitySignalsMappings`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse: + Response object for ``BatchCreateEntitySignalsMappings`` + method. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=networks/*}/entitySignalsMappings:batchCreate", + "body": "*", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_batch_create_entity_signals_mappings( + request, metadata + ) + pb_request = entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError 
exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse() + ) + pb_resp = entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_create_entity_signals_mappings(resp) + return resp + + class _BatchUpdateEntitySignalsMappings(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("BatchUpdateEntitySignalsMappings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse: + r"""Call the batch update entity + signals mappings method over HTTP. + + Args: + request (~.entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest): + The request object. Request object for ``BatchUpdateEntitySignalsMappings`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse: + Response object for ``BatchUpdateEntitySignalsMappings`` + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=networks/*}/entitySignalsMappings:batchUpdate", + "body": "*", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_batch_update_entity_signals_mappings( + request, metadata + ) + pb_request = entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse() + ) + pb_resp = entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_update_entity_signals_mappings(resp) + return resp + + class _CreateEntitySignalsMapping(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("CreateEntitySignalsMapping") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.CreateEntitySignalsMappingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""Call the create entity signals + mapping method over HTTP. + + Args: + request (~.entity_signals_mapping_service.CreateEntitySignalsMappingRequest): + The request object. Request object for + 'CreateEntitySignalsMapping' method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_messages.EntitySignalsMapping: + The ``EntitySignalsMapping`` resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=networks/*}/entitySignalsMappings", + "body": "entity_signals_mapping", + }, + ] + request, metadata = self._interceptor.pre_create_entity_signals_mapping( + request, metadata + ) + pb_request = ( + entity_signals_mapping_service.CreateEntitySignalsMappingRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = entity_signals_mapping_messages.EntitySignalsMapping() + pb_resp = entity_signals_mapping_messages.EntitySignalsMapping.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_entity_signals_mapping(resp) + return resp + + class _GetEntitySignalsMapping(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("GetEntitySignalsMapping") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.GetEntitySignalsMappingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""Call the get entity signals + mapping method over HTTP. + + Args: + request (~.entity_signals_mapping_service.GetEntitySignalsMappingRequest): + The request object. Request object for ``GetEntitySignalsMapping`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_messages.EntitySignalsMapping: + The ``EntitySignalsMapping`` resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=networks/*/entitySignalsMappings/*}", + }, + ] + request, metadata = self._interceptor.pre_get_entity_signals_mapping( + request, metadata + ) + pb_request = ( + entity_signals_mapping_service.GetEntitySignalsMappingRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = entity_signals_mapping_messages.EntitySignalsMapping() + pb_resp = entity_signals_mapping_messages.EntitySignalsMapping.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_entity_signals_mapping(resp) + return resp + + class _ListEntitySignalsMappings(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("ListEntitySignalsMappings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.ListEntitySignalsMappingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.ListEntitySignalsMappingsResponse: + r"""Call the list entity signals + mappings method over HTTP. + + Args: + request (~.entity_signals_mapping_service.ListEntitySignalsMappingsRequest): + The request object. Request object for ``ListEntitySignalsMappings`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_service.ListEntitySignalsMappingsResponse: + Response object for ``ListEntitySignalsMappingsRequest`` + containing matching ``EntitySignalsMapping`` resources. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=networks/*}/entitySignalsMappings", + }, + ] + request, metadata = self._interceptor.pre_list_entity_signals_mappings( + request, metadata + ) + pb_request = ( + entity_signals_mapping_service.ListEntitySignalsMappingsRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = entity_signals_mapping_service.ListEntitySignalsMappingsResponse() + pb_resp = ( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse.pb( + resp + ) + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_entity_signals_mappings(resp) + return resp + + class _UpdateEntitySignalsMapping(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("UpdateEntitySignalsMapping") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""Call the update entity signals + mapping method over HTTP. + + Args: + request (~.entity_signals_mapping_service.UpdateEntitySignalsMappingRequest): + The request object. Request object for + 'UpdateEntitySignalsMapping' method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_messages.EntitySignalsMapping: + The ``EntitySignalsMapping`` resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{entity_signals_mapping.name=networks/*/entitySignalsMappings/*}", + "body": "entity_signals_mapping", + }, + ] + request, metadata = self._interceptor.pre_update_entity_signals_mapping( + request, metadata + ) + pb_request = ( + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = entity_signals_mapping_messages.EntitySignalsMapping() + pb_resp = entity_signals_mapping_messages.EntitySignalsMapping.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_entity_signals_mapping(resp) + return resp + + @property + def batch_create_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest], + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchCreateEntitySignalsMappings(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_update_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest], + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchUpdateEntitySignalsMappings(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.CreateEntitySignalsMappingRequest], + entity_signals_mapping_messages.EntitySignalsMapping, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateEntitySignalsMapping(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.GetEntitySignalsMappingRequest], + entity_signals_mapping_messages.EntitySignalsMapping, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEntitySignalsMapping(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.ListEntitySignalsMappingsRequest], + entity_signals_mapping_service.ListEntitySignalsMappingsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEntitySignalsMappings(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.UpdateEntitySignalsMappingRequest], + entity_signals_mapping_messages.EntitySignalsMapping, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateEntitySignalsMapping(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(EntitySignalsMappingServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. 
+ + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + }, + { + "method": "get", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("EntitySignalsMappingServiceRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/__init__.py deleted file mode 100644 index 2944d1a2145f..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import LabelServiceClient - -__all__ = ("LabelServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/client.py deleted file mode 100644 index ee9d7253b492..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/client.py +++ /dev/null @@ -1,977 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import ( - Callable, - Dict, - Mapping, - MutableMapping, - MutableSequence, - Optional, - Sequence, - Tuple, - Type, - Union, - cast, -) -import warnings - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.ads.admanager_v1 import gapic_version as package_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -from google.longrunning import operations_pb2 # type: ignore - -from google.ads.admanager_v1.services.label_service import pagers -from google.ads.admanager_v1.types import label_service - -from .transports.base import DEFAULT_CLIENT_INFO, LabelServiceTransport -from .transports.rest import LabelServiceRestTransport - - -class LabelServiceClientMeta(type): - """Metaclass for the LabelService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - - _transport_registry = OrderedDict() # type: Dict[str, Type[LabelServiceTransport]] - _transport_registry["rest"] = LabelServiceRestTransport - - def get_transport_class( - cls, - label: Optional[str] = None, - ) -> Type[LabelServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. 
If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class LabelServiceClient(metaclass=LabelServiceClientMeta): - """Provides methods for handling Label objects.""" - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "admanager.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "admanager.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. 
- kwargs: Additional arguments to pass to the constructor. - - Returns: - LabelServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LabelServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> LabelServiceTransport: - """Returns the transport used by the client instance. - - Returns: - LabelServiceTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def label_path( - network_code: str, - label: str, - ) -> str: - """Returns a fully-qualified label string.""" - return "networks/{network_code}/labels/{label}".format( - network_code=network_code, - label=label, - ) - - @staticmethod - def parse_label_path(path: str) -> Dict[str, str]: - """Parses a label path into its component segments.""" - m = re.match(r"^networks/(?P.+?)/labels/(?P